1 // SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
2 /* Copyright(c) 2018-2019  Realtek Corporation
3  */
4 
5 #include <linux/module.h>
6 #include "main.h"
7 #include "coex.h"
8 #include "fw.h"
9 #include "tx.h"
10 #include "rx.h"
11 #include "phy.h"
12 #include "rtw8822c.h"
13 #include "rtw8822c_table.h"
14 #include "mac.h"
15 #include "reg.h"
16 #include "debug.h"
17 #include "util.h"
18 #include "bf.h"
19 #include "efuse.h"
20 
21 #define IQK_DONE_8822C 0xaa
22 
23 static void rtw8822c_config_trx_mode(struct rtw_dev *rtwdev, u8 tx_path,
24 				     u8 rx_path, bool is_tx2_path);
25 
26 static void rtw8822ce_efuse_parsing(struct rtw_efuse *efuse,
27 				    struct rtw8822c_efuse *map)
28 {
29 	ether_addr_copy(efuse->addr, map->e.mac_addr);
30 }
31 
32 static int rtw8822c_read_efuse(struct rtw_dev *rtwdev, u8 *log_map)
33 {
34 	struct rtw_efuse *efuse = &rtwdev->efuse;
35 	struct rtw8822c_efuse *map;
36 	int i;
37 
38 	map = (struct rtw8822c_efuse *)log_map;
39 
40 	efuse->rfe_option = map->rfe_option;
41 	efuse->rf_board_option = map->rf_board_option;
42 	efuse->crystal_cap = map->xtal_k & XCAP_MASK;
43 	efuse->channel_plan = map->channel_plan;
44 	efuse->country_code[0] = map->country_code[0];
45 	efuse->country_code[1] = map->country_code[1];
46 	efuse->bt_setting = map->rf_bt_setting;
47 	efuse->regd = map->rf_board_option & 0x7;
48 	efuse->thermal_meter[RF_PATH_A] = map->path_a_thermal;
49 	efuse->thermal_meter[RF_PATH_B] = map->path_b_thermal;
50 	efuse->thermal_meter_k =
51 			(map->path_a_thermal + map->path_b_thermal) >> 1;
52 	efuse->power_track_type = (map->tx_pwr_calibrate_rate >> 4) & 0xf;
53 
54 	for (i = 0; i < 4; i++)
55 		efuse->txpwr_idx_table[i] = map->txpwr_idx_table[i];
56 
57 	switch (rtw_hci_type(rtwdev)) {
58 	case RTW_HCI_TYPE_PCIE:
59 		rtw8822ce_efuse_parsing(efuse, map);
60 		break;
61 	default:
62 		/* unsupported now */
63 		return -ENOTSUPP;
64 	}
65 
66 	return 0;
67 }
68 
69 static void rtw8822c_header_file_init(struct rtw_dev *rtwdev, bool pre)
70 {
71 	rtw_write32_set(rtwdev, REG_3WIRE, BIT_3WIRE_TX_EN | BIT_3WIRE_RX_EN);
72 	rtw_write32_set(rtwdev, REG_3WIRE, BIT_3WIRE_PI_ON);
73 	rtw_write32_set(rtwdev, REG_3WIRE2, BIT_3WIRE_TX_EN | BIT_3WIRE_RX_EN);
74 	rtw_write32_set(rtwdev, REG_3WIRE2, BIT_3WIRE_PI_ON);
75 
76 	if (pre)
77 		rtw_write32_clr(rtwdev, REG_ENCCK, BIT_CCK_OFDM_BLK_EN);
78 	else
79 		rtw_write32_set(rtwdev, REG_ENCCK, BIT_CCK_OFDM_BLK_EN);
80 }
81 
82 static void rtw8822c_bb_reset(struct rtw_dev *rtwdev)
83 {
84 	rtw_write16_set(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
85 	rtw_write16_clr(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
86 	rtw_write16_set(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
87 }
88 
89 static void rtw8822c_dac_backup_reg(struct rtw_dev *rtwdev,
90 				    struct rtw_backup_info *backup,
91 				    struct rtw_backup_info *backup_rf)
92 {
93 	u32 path, i;
94 	u32 val;
95 	u32 reg;
96 	u32 rf_addr[DACK_RF_8822C] = {0x8f};
97 	u32 addrs[DACK_REG_8822C] = {0x180c, 0x1810, 0x410c, 0x4110,
98 				     0x1c3c, 0x1c24, 0x1d70, 0x9b4,
99 				     0x1a00, 0x1a14, 0x1d58, 0x1c38,
100 				     0x1e24, 0x1e28, 0x1860, 0x4160};
101 
102 	for (i = 0; i < DACK_REG_8822C; i++) {
103 		backup[i].len = 4;
104 		backup[i].reg = addrs[i];
105 		backup[i].val = rtw_read32(rtwdev, addrs[i]);
106 	}
107 
108 	for (path = 0; path < DACK_PATH_8822C; path++) {
109 		for (i = 0; i < DACK_RF_8822C; i++) {
110 			reg = rf_addr[i];
111 			val = rtw_read_rf(rtwdev, path, reg, RFREG_MASK);
112 			backup_rf[path * DACK_RF_8822C + i].reg = reg;
113 			backup_rf[path * DACK_RF_8822C + i].val = val;
114 		}
115 	}
116 }
117 
118 static void rtw8822c_dac_restore_reg(struct rtw_dev *rtwdev,
119 				     struct rtw_backup_info *backup,
120 				     struct rtw_backup_info *backup_rf)
121 {
122 	u32 path, i;
123 	u32 val;
124 	u32 reg;
125 
126 	rtw_restore_reg(rtwdev, backup, DACK_REG_8822C);
127 
128 	for (path = 0; path < DACK_PATH_8822C; path++) {
129 		for (i = 0; i < DACK_RF_8822C; i++) {
130 			val = backup_rf[path * DACK_RF_8822C + i].val;
131 			reg = backup_rf[path * DACK_RF_8822C + i].reg;
132 			rtw_write_rf(rtwdev, path, reg, RFREG_MASK, val);
133 		}
134 	}
135 }
136 
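/* DAC calibration samples are 10-bit values: raw values of 0x200 and above
 * are treated as negative, with magnitude 0x400 - value. The helpers below
 * compare, sort and average samples under that convention.
 */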
137 static void rtw8822c_rf_minmax_cmp(struct rtw_dev *rtwdev, u32 value,
138 				   u32 *min, u32 *max)
139 {
140 	if (value >= 0x200) {
141 		if (*min >= 0x200) {
142 			if (*min > value)
143 				*min = value;
144 		} else {
145 			*min = value;
146 		}
147 		if (*max >= 0x200) {
148 			if (*max < value)
149 				*max = value;
150 		}
151 	} else {
152 		if (*min < 0x200) {
153 			if (*min > value)
154 				*min = value;
155 		}
156 
157 		if (*max >= 0x200) {
158 			*max = value;
159 		} else {
160 			if (*max < value)
161 				*max = value;
162 		}
163 	}
164 }
165 
166 static void __rtw8822c_dac_iq_sort(struct rtw_dev *rtwdev, u32 *v1, u32 *v2)
167 {
168 	if (*v1 >= 0x200 && *v2 >= 0x200) {
169 		if (*v1 > *v2)
170 			swap(*v1, *v2);
171 	} else if (*v1 < 0x200 && *v2 < 0x200) {
172 		if (*v1 > *v2)
173 			swap(*v1, *v2);
174 	} else if (*v1 < 0x200 && *v2 >= 0x200) {
175 		swap(*v1, *v2);
176 	}
177 }
178 
179 static void rtw8822c_dac_iq_sort(struct rtw_dev *rtwdev, u32 *iv, u32 *qv)
180 {
181 	u32 i, j;
182 
183 	for (i = 0; i < DACK_SN_8822C - 1; i++) {
184 		for (j = 0; j < (DACK_SN_8822C - 1 - i) ; j++) {
185 			__rtw8822c_dac_iq_sort(rtwdev, &iv[j], &iv[j + 1]);
186 			__rtw8822c_dac_iq_sort(rtwdev, &qv[j], &qv[j + 1]);
187 		}
188 	}
189 }
190 
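/* Skip the first and last ten entries of the (sorted) sample vector, average
 * the rest, and return the mean offset re-encoded as a 10-bit
 * two's-complement value.
 */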
191 static void rtw8822c_dac_iq_offset(struct rtw_dev *rtwdev, u32 *vec, u32 *val)
192 {
193 	u32 p, m, t, i;
194 
195 	m = 0;
196 	p = 0;
197 	for (i = 10; i < DACK_SN_8822C - 10; i++) {
198 		if (vec[i] > 0x200)
199 			m = (0x400 - vec[i]) + m;
200 		else
201 			p = vec[i] + p;
202 	}
203 
204 	if (p > m) {
205 		t = p - m;
206 		t = t / (DACK_SN_8822C - 20);
207 	} else {
208 		t = m - p;
209 		t = t / (DACK_SN_8822C - 20);
210 		if (t != 0x0)
211 			t = 0x400 - t;
212 	}
213 
214 	*val = t;
215 }
216 
217 static u32 rtw8822c_get_path_write_addr(u8 path)
218 {
219 	u32 base_addr;
220 
221 	switch (path) {
222 	case RF_PATH_A:
223 		base_addr = 0x1800;
224 		break;
225 	case RF_PATH_B:
226 		base_addr = 0x4100;
227 		break;
228 	default:
229 		WARN_ON(1);
230 		return -1;
231 	}
232 
233 	return base_addr;
234 }
235 
236 static u32 rtw8822c_get_path_read_addr(u8 path)
237 {
238 	u32 base_addr;
239 
240 	switch (path) {
241 	case RF_PATH_A:
242 		base_addr = 0x2800;
243 		break;
244 	case RF_PATH_B:
245 		base_addr = 0x4500;
246 		break;
247 	default:
248 		WARN_ON(1);
249 		return -1;
250 	}
251 
252 	return base_addr;
253 }
254 
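/* Reject a sample whose DC offset magnitude exceeds 0x64; such samples are
 * treated as overflow and re-read by the caller.
 */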
255 static bool rtw8822c_dac_iq_check(struct rtw_dev *rtwdev, u32 value)
256 {
257 	bool ret = true;
258 
259 	if ((value >= 0x200 && (0x400 - value) > 0x64) ||
260 	    (value < 0x200 && value > 0x64)) {
261 		ret = false;
262 		rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] Error overflow\n");
263 	}
264 
265 	return ret;
266 }
267 
268 static void rtw8822c_dac_cal_iq_sample(struct rtw_dev *rtwdev, u32 *iv, u32 *qv)
269 {
270 	u32 temp;
271 	int i = 0, cnt = 0;
272 
273 	while (i < DACK_SN_8822C && cnt < 10000) {
274 		cnt++;
275 		temp = rtw_read32_mask(rtwdev, 0x2dbc, 0x3fffff);
276 		iv[i] = (temp & 0x3ff000) >> 12;
277 		qv[i] = temp & 0x3ff;
278 
279 		if (rtw8822c_dac_iq_check(rtwdev, iv[i]) &&
280 		    rtw8822c_dac_iq_check(rtwdev, qv[i]))
281 			i++;
282 	}
283 }
284 
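/* Keep replacing the two extreme samples with fresh reads until the I and Q
 * spreads are no more than 5 (or 100 iterations pass), then derive the
 * average I/Q offsets from the sorted samples.
 */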
285 static void rtw8822c_dac_cal_iq_search(struct rtw_dev *rtwdev,
286 				       u32 *iv, u32 *qv,
287 				       u32 *i_value, u32 *q_value)
288 {
289 	u32 i_max = 0, q_max = 0, i_min = 0, q_min = 0;
290 	u32 i_delta, q_delta;
291 	u32 temp;
292 	int i, cnt = 0;
293 
294 	do {
295 		i_min = iv[0];
296 		i_max = iv[0];
297 		q_min = qv[0];
298 		q_max = qv[0];
299 		for (i = 0; i < DACK_SN_8822C; i++) {
300 			rtw8822c_rf_minmax_cmp(rtwdev, iv[i], &i_min, &i_max);
301 			rtw8822c_rf_minmax_cmp(rtwdev, qv[i], &q_min, &q_max);
302 		}
303 
304 		if (i_max < 0x200 && i_min < 0x200)
305 			i_delta = i_max - i_min;
306 		else if (i_max >= 0x200 && i_min >= 0x200)
307 			i_delta = i_max - i_min;
308 		else
309 			i_delta = i_max + (0x400 - i_min);
310 
311 		if (q_max < 0x200 && q_min < 0x200)
312 			q_delta = q_max - q_min;
313 		else if (q_max >= 0x200 && q_min >= 0x200)
314 			q_delta = q_max - q_min;
315 		else
316 			q_delta = q_max + (0x400 - q_min);
317 
318 		rtw_dbg(rtwdev, RTW_DBG_RFK,
319 			"[DACK] i: min=0x%08x, max=0x%08x, delta=0x%08x\n",
320 			i_min, i_max, i_delta);
321 		rtw_dbg(rtwdev, RTW_DBG_RFK,
322 			"[DACK] q: min=0x%08x, max=0x%08x, delta=0x%08x\n",
323 			q_min, q_max, q_delta);
324 
325 		rtw8822c_dac_iq_sort(rtwdev, iv, qv);
326 
327 		if (i_delta > 5 || q_delta > 5) {
328 			temp = rtw_read32_mask(rtwdev, 0x2dbc, 0x3fffff);
329 			iv[0] = (temp & 0x3ff000) >> 12;
330 			qv[0] = temp & 0x3ff;
331 			temp = rtw_read32_mask(rtwdev, 0x2dbc, 0x3fffff);
332 			iv[DACK_SN_8822C - 1] = (temp & 0x3ff000) >> 12;
333 			qv[DACK_SN_8822C - 1] = temp & 0x3ff;
334 		} else {
335 			break;
336 		}
337 	} while (cnt++ < 100);
338 
339 	rtw8822c_dac_iq_offset(rtwdev, iv, i_value);
340 	rtw8822c_dac_iq_offset(rtwdev, qv, q_value);
341 }
342 
343 static void rtw8822c_dac_cal_rf_mode(struct rtw_dev *rtwdev,
344 				     u32 *i_value, u32 *q_value)
345 {
346 	u32 iv[DACK_SN_8822C], qv[DACK_SN_8822C];
347 	u32 rf_a, rf_b;
348 
349 	rf_a = rtw_read_rf(rtwdev, RF_PATH_A, 0x0, RFREG_MASK);
350 	rf_b = rtw_read_rf(rtwdev, RF_PATH_B, 0x0, RFREG_MASK);
351 
352 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] RF path-A=0x%05x\n", rf_a);
353 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] RF path-B=0x%05x\n", rf_b);
354 
355 	rtw8822c_dac_cal_iq_sample(rtwdev, iv, qv);
356 	rtw8822c_dac_cal_iq_search(rtwdev, iv, qv, i_value, q_value);
357 }
358 
359 static void rtw8822c_dac_bb_setting(struct rtw_dev *rtwdev)
360 {
361 	rtw_write32_mask(rtwdev, 0x1d58, 0xff8, 0x1ff);
362 	rtw_write32_mask(rtwdev, 0x1a00, 0x3, 0x2);
363 	rtw_write32_mask(rtwdev, 0x1a14, 0x300, 0x3);
364 	rtw_write32(rtwdev, 0x1d70, 0x7e7e7e7e);
365 	rtw_write32_mask(rtwdev, 0x180c, 0x3, 0x0);
366 	rtw_write32_mask(rtwdev, 0x410c, 0x3, 0x0);
367 	rtw_write32(rtwdev, 0x1b00, 0x00000008);
368 	rtw_write8(rtwdev, 0x1bcc, 0x3f);
369 	rtw_write32(rtwdev, 0x1b00, 0x0000000a);
370 	rtw_write8(rtwdev, 0x1bcc, 0x3f);
371 	rtw_write32_mask(rtwdev, 0x1e24, BIT(31), 0x0);
372 	rtw_write32_mask(rtwdev, 0x1e28, 0xf, 0x3);
373 }
374 
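/* Measure the ADC I/Q DC offset for the given path, program the compensation
 * word into <base_addr + 0x68>, and retry (up to 10 times) until the
 * residual offset on both I and Q is below 5.
 */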
375 static void rtw8822c_dac_cal_adc(struct rtw_dev *rtwdev,
376 				 u8 path, u32 *adc_ic, u32 *adc_qc)
377 {
378 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
379 	u32 ic = 0, qc = 0, temp = 0;
380 	u32 base_addr;
381 	u32 path_sel;
382 	int i;
383 
384 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] ADCK path(%d)\n", path);
385 
386 	base_addr = rtw8822c_get_path_write_addr(path);
387 	switch (path) {
388 	case RF_PATH_A:
389 		path_sel = 0xa0000;
390 		break;
391 	case RF_PATH_B:
392 		path_sel = 0x80000;
393 		break;
394 	default:
395 		WARN_ON(1);
396 		return;
397 	}
398 
399 	/* ADCK step1 */
400 	rtw_write32_mask(rtwdev, base_addr + 0x30, BIT(30), 0x0);
401 	if (path == RF_PATH_B)
402 		rtw_write32(rtwdev, base_addr + 0x30, 0x30db8041);
403 	rtw_write32(rtwdev, base_addr + 0x60, 0xf0040ff0);
404 	rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
405 	rtw_write32(rtwdev, base_addr + 0x10, 0x02dd08c4);
406 	rtw_write32(rtwdev, base_addr + 0x0c, 0x10000260);
407 	rtw_write_rf(rtwdev, RF_PATH_A, 0x0, RFREG_MASK, 0x10000);
408 	rtw_write_rf(rtwdev, RF_PATH_B, 0x0, RFREG_MASK, 0x10000);
409 	for (i = 0; i < 10; i++) {
410 		rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] ADCK count=%d\n", i);
411 		rtw_write32(rtwdev, 0x1c3c, path_sel + 0x8003);
412 		rtw_write32(rtwdev, 0x1c24, 0x00010002);
413 		rtw8822c_dac_cal_rf_mode(rtwdev, &ic, &qc);
414 		rtw_dbg(rtwdev, RTW_DBG_RFK,
415 			"[DACK] before: i=0x%x, q=0x%x\n", ic, qc);
416 
417 		/* compensation value */
418 		if (ic != 0x0) {
419 			ic = 0x400 - ic;
420 			*adc_ic = ic;
421 		}
422 		if (qc != 0x0) {
423 			qc = 0x400 - qc;
424 			*adc_qc = qc;
425 		}
426 		temp = (ic & 0x3ff) | ((qc & 0x3ff) << 10);
427 		rtw_write32(rtwdev, base_addr + 0x68, temp);
428 		dm_info->dack_adck[path] = temp;
429 		rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] ADCK 0x%08x=0x%08x\n",
430 			base_addr + 0x68, temp);
431 		/* check ADC DC offset */
432 		rtw_write32(rtwdev, 0x1c3c, path_sel + 0x8103);
433 		rtw8822c_dac_cal_rf_mode(rtwdev, &ic, &qc);
434 		rtw_dbg(rtwdev, RTW_DBG_RFK,
435 			"[DACK] after:  i=0x%08x, q=0x%08x\n", ic, qc);
436 		if (ic >= 0x200)
437 			ic = 0x400 - ic;
438 		if (qc >= 0x200)
439 			qc = 0x400 - qc;
440 		if (ic < 5 && qc < 5)
441 			break;
442 	}
443 
444 	/* ADCK step2 */
445 	rtw_write32(rtwdev, 0x1c3c, 0x00000003);
446 	rtw_write32(rtwdev, base_addr + 0x0c, 0x10000260);
447 	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c4);
448 
449 	/* release pull low switch on IQ path */
450 	rtw_write_rf(rtwdev, path, 0x8f, BIT(13), 0x1);
451 }
452 
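/* DACK step 1: program the calibration sequence for the given path and poll
 * the read-back registers (offsets 0x08 and 0x34) until both report 0xffff,
 * logging an error on timeout.
 */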
453 static void rtw8822c_dac_cal_step1(struct rtw_dev *rtwdev, u8 path)
454 {
455 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
456 	u32 base_addr;
457 	u32 read_addr;
458 
459 	base_addr = rtw8822c_get_path_write_addr(path);
460 	read_addr = rtw8822c_get_path_read_addr(path);
461 
462 	rtw_write32(rtwdev, base_addr + 0x68, dm_info->dack_adck[path]);
463 	rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
464 	if (path == RF_PATH_A) {
465 		rtw_write32(rtwdev, base_addr + 0x60, 0xf0040ff0);
466 		rtw_write32(rtwdev, 0x1c38, 0xffffffff);
467 	}
468 	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
469 	rtw_write32(rtwdev, 0x9b4, 0xdb66db00);
470 	rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb88);
471 	rtw_write32(rtwdev, base_addr + 0xbc, 0x0008ff81);
472 	rtw_write32(rtwdev, base_addr + 0xc0, 0x0003d208);
473 	rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb88);
474 	rtw_write32(rtwdev, base_addr + 0xd8, 0x0008ff81);
475 	rtw_write32(rtwdev, base_addr + 0xdc, 0x0003d208);
476 	rtw_write32(rtwdev, base_addr + 0xb8, 0x60000000);
477 	mdelay(2);
478 	rtw_write32(rtwdev, base_addr + 0xbc, 0x000aff8d);
479 	mdelay(2);
480 	rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb89);
481 	rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb89);
482 	mdelay(1);
483 	rtw_write32(rtwdev, base_addr + 0xb8, 0x62000000);
484 	rtw_write32(rtwdev, base_addr + 0xd4, 0x62000000);
485 	mdelay(20);
486 	if (!check_hw_ready(rtwdev, read_addr + 0x08, 0x7fff80, 0xffff) ||
487 	    !check_hw_ready(rtwdev, read_addr + 0x34, 0x7fff80, 0xffff))
488 		rtw_err(rtwdev, "failed to wait for dack ready\n");
489 	rtw_write32(rtwdev, base_addr + 0xb8, 0x02000000);
490 	mdelay(1);
491 	rtw_write32(rtwdev, base_addr + 0xbc, 0x0008ff87);
492 	rtw_write32(rtwdev, 0x9b4, 0xdb6db600);
493 	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
494 	rtw_write32(rtwdev, base_addr + 0xbc, 0x0008ff87);
495 	rtw_write32(rtwdev, base_addr + 0x60, 0xf0000000);
496 }
497 
498 static void rtw8822c_dac_cal_step2(struct rtw_dev *rtwdev,
499 				   u8 path, u32 *ic_out, u32 *qc_out)
500 {
501 	u32 base_addr;
502 	u32 ic, qc, ic_in, qc_in;
503 
504 	base_addr = rtw8822c_get_path_write_addr(path);
505 	rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xf0000000, 0x0);
506 	rtw_write32_mask(rtwdev, base_addr + 0xc0, 0xf, 0x8);
507 	rtw_write32_mask(rtwdev, base_addr + 0xd8, 0xf0000000, 0x0);
508 	rtw_write32_mask(rtwdev, base_addr + 0xdc, 0xf, 0x8);
509 
510 	rtw_write32(rtwdev, 0x1b00, 0x00000008);
511 	rtw_write8(rtwdev, 0x1bcc, 0x03f);
512 	rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
513 	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
514 	rtw_write32(rtwdev, 0x1c3c, 0x00088103);
515 
516 	rtw8822c_dac_cal_rf_mode(rtwdev, &ic_in, &qc_in);
517 	ic = ic_in;
518 	qc = qc_in;
519 
520 	/* compensation value */
521 	if (ic != 0x0)
522 		ic = 0x400 - ic;
523 	if (qc != 0x0)
524 		qc = 0x400 - qc;
525 	if (ic < 0x300) {
526 		ic = ic * 2 * 6 / 5;
527 		ic = ic + 0x80;
528 	} else {
529 		ic = (0x400 - ic) * 2 * 6 / 5;
530 		ic = 0x7f - ic;
531 	}
532 	if (qc < 0x300) {
533 		qc = qc * 2 * 6 / 5;
534 		qc = qc + 0x80;
535 	} else {
536 		qc = (0x400 - qc) * 2 * 6 / 5;
537 		qc = 0x7f - qc;
538 	}
539 
540 	*ic_out = ic;
541 	*qc_out = qc;
542 
543 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] before i=0x%x, q=0x%x\n", ic_in, qc_in);
544 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] after  i=0x%x, q=0x%x\n", ic, qc);
545 }
546 
547 static void rtw8822c_dac_cal_step3(struct rtw_dev *rtwdev, u8 path,
548 				   u32 adc_ic, u32 adc_qc,
549 				   u32 *ic_in, u32 *qc_in,
550 				   u32 *i_out, u32 *q_out)
551 {
552 	u32 base_addr;
553 	u32 read_addr;
554 	u32 ic, qc;
555 	u32 temp;
556 
557 	base_addr = rtw8822c_get_path_write_addr(path);
558 	read_addr = rtw8822c_get_path_read_addr(path);
559 	ic = *ic_in;
560 	qc = *qc_in;
561 
562 	rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
563 	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
564 	rtw_write32(rtwdev, 0x9b4, 0xdb66db00);
565 	rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb88);
566 	rtw_write32(rtwdev, base_addr + 0xbc, 0xc008ff81);
567 	rtw_write32(rtwdev, base_addr + 0xc0, 0x0003d208);
568 	rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xf0000000, ic & 0xf);
569 	rtw_write32_mask(rtwdev, base_addr + 0xc0, 0xf, (ic & 0xf0) >> 4);
570 	rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb88);
571 	rtw_write32(rtwdev, base_addr + 0xd8, 0xe008ff81);
572 	rtw_write32(rtwdev, base_addr + 0xdc, 0x0003d208);
573 	rtw_write32_mask(rtwdev, base_addr + 0xd8, 0xf0000000, qc & 0xf);
574 	rtw_write32_mask(rtwdev, base_addr + 0xdc, 0xf, (qc & 0xf0) >> 4);
575 	rtw_write32(rtwdev, base_addr + 0xb8, 0x60000000);
576 	mdelay(2);
577 	rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xe, 0x6);
578 	mdelay(2);
579 	rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb89);
580 	rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb89);
581 	mdelay(1);
582 	rtw_write32(rtwdev, base_addr + 0xb8, 0x62000000);
583 	rtw_write32(rtwdev, base_addr + 0xd4, 0x62000000);
584 	mdelay(20);
585 	if (!check_hw_ready(rtwdev, read_addr + 0x24, 0x07f80000, ic) ||
586 	    !check_hw_ready(rtwdev, read_addr + 0x50, 0x07f80000, qc))
587 		rtw_err(rtwdev, "failed to write IQ vector to hardware\n");
588 	rtw_write32(rtwdev, base_addr + 0xb8, 0x02000000);
589 	mdelay(1);
590 	rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xe, 0x3);
591 	rtw_write32(rtwdev, 0x9b4, 0xdb6db600);
592 
593 	/* check DAC DC offset */
594 	temp = ((adc_ic + 0x10) & 0x3ff) | (((adc_qc + 0x10) & 0x3ff) << 10);
595 	rtw_write32(rtwdev, base_addr + 0x68, temp);
596 	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
597 	rtw_write32(rtwdev, base_addr + 0x60, 0xf0000000);
598 	rtw8822c_dac_cal_rf_mode(rtwdev, &ic, &qc);
599 	if (ic >= 0x10)
600 		ic = ic - 0x10;
601 	else
602 		ic = 0x400 - (0x10 - ic);
603 
604 	if (qc >= 0x10)
605 		qc = qc - 0x10;
606 	else
607 		qc = 0x400 - (0x10 - qc);
608 
609 	*i_out = ic;
610 	*q_out = qc;
611 
612 	if (ic >= 0x200)
613 		ic = 0x400 - ic;
614 	if (qc >= 0x200)
615 		qc = 0x400 - qc;
616 
617 	*ic_in = ic;
618 	*qc_in = qc;
619 
620 	rtw_dbg(rtwdev, RTW_DBG_RFK,
621 		"[DACK] after  DACK i=0x%x, q=0x%x\n", *i_out, *q_out);
622 }
623 
624 static void rtw8822c_dac_cal_step4(struct rtw_dev *rtwdev, u8 path)
625 {
626 	u32 base_addr = rtw8822c_get_path_write_addr(path);
627 
628 	rtw_write32(rtwdev, base_addr + 0x68, 0x0);
629 	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c4);
630 	rtw_write32_mask(rtwdev, base_addr + 0xbc, 0x1, 0x0);
631 	rtw_write32_mask(rtwdev, base_addr + 0x30, BIT(30), 0x1);
632 }
633 
634 static void rtw8822c_dac_cal_backup_vec(struct rtw_dev *rtwdev,
635 					u8 path, u8 vec, u32 w_addr, u32 r_addr)
636 {
637 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
638 	u16 val;
639 	u32 i;
640 
641 	if (WARN_ON(vec >= 2))
642 		return;
643 
644 	for (i = 0; i < DACK_MSBK_BACKUP_NUM; i++) {
645 		rtw_write32_mask(rtwdev, w_addr, 0xf0000000, i);
646 		val = (u16)rtw_read32_mask(rtwdev, r_addr, 0x7fc0000);
647 		dm_info->dack_msbk[path][vec][i] = val;
648 	}
649 }
650 
651 static void rtw8822c_dac_cal_backup_path(struct rtw_dev *rtwdev, u8 path)
652 {
653 	u32 w_off = 0x1c;
654 	u32 r_off = 0x2c;
655 	u32 w_addr, r_addr;
656 
657 	if (WARN_ON(path >= 2))
658 		return;
659 
660 	/* backup I vector */
661 	w_addr = rtw8822c_get_path_write_addr(path) + 0xb0;
662 	r_addr = rtw8822c_get_path_read_addr(path) + 0x10;
663 	rtw8822c_dac_cal_backup_vec(rtwdev, path, 0, w_addr, r_addr);
664 
665 	/* backup Q vector */
666 	w_addr = rtw8822c_get_path_write_addr(path) + 0xb0 + w_off;
667 	r_addr = rtw8822c_get_path_read_addr(path) + 0x10 + r_off;
668 	rtw8822c_dac_cal_backup_vec(rtwdev, path, 1, w_addr, r_addr);
669 }
670 
671 static void rtw8822c_dac_cal_backup_dck(struct rtw_dev *rtwdev)
672 {
673 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
674 	u8 val;
675 
676 	val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_I_0, 0xf0000000);
677 	dm_info->dack_dck[RF_PATH_A][0][0] = val;
678 	val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_I_1, 0xf);
679 	dm_info->dack_dck[RF_PATH_A][0][1] = val;
680 	val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_Q_0, 0xf0000000);
681 	dm_info->dack_dck[RF_PATH_A][1][0] = val;
682 	val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_Q_1, 0xf);
683 	dm_info->dack_dck[RF_PATH_A][1][1] = val;
684 
685 	val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_I_0, 0xf0000000);
686 	dm_info->dack_dck[RF_PATH_B][0][0] = val;
687 	val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_I_1, 0xf);
688 	dm_info->dack_dck[RF_PATH_B][0][1] = val;
689 	val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_Q_0, 0xf0000000);
690 	dm_info->dack_dck[RF_PATH_B][1][0] = val;
691 	val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_Q_1, 0xf);
692 	dm_info->dack_dck[RF_PATH_B][1][1] = val;
693 }
694 
695 static void rtw8822c_dac_cal_backup(struct rtw_dev *rtwdev)
696 {
697 	u32 temp[3];
698 
699 	temp[0] = rtw_read32(rtwdev, 0x1860);
700 	temp[1] = rtw_read32(rtwdev, 0x4160);
701 	temp[2] = rtw_read32(rtwdev, 0x9b4);
702 
703 	/* set clock */
704 	rtw_write32(rtwdev, 0x9b4, 0xdb66db00);
705 
706 	/* backup path-A I/Q */
707 	rtw_write32_clr(rtwdev, 0x1830, BIT(30));
708 	rtw_write32_mask(rtwdev, 0x1860, 0xfc000000, 0x3c);
709 	rtw8822c_dac_cal_backup_path(rtwdev, RF_PATH_A);
710 
711 	/* backup path-B I/Q */
712 	rtw_write32_clr(rtwdev, 0x4130, BIT(30));
713 	rtw_write32_mask(rtwdev, 0x4160, 0xfc000000, 0x3c);
714 	rtw8822c_dac_cal_backup_path(rtwdev, RF_PATH_B);
715 
716 	rtw8822c_dac_cal_backup_dck(rtwdev);
717 	rtw_write32_set(rtwdev, 0x1830, BIT(30));
718 	rtw_write32_set(rtwdev, 0x4130, BIT(30));
719 
720 	rtw_write32(rtwdev, 0x1860, temp[0]);
721 	rtw_write32(rtwdev, 0x4160, temp[1]);
722 	rtw_write32(rtwdev, 0x9b4, temp[2]);
723 }
724 
725 static void rtw8822c_dac_cal_restore_dck(struct rtw_dev *rtwdev)
726 {
727 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
728 	u8 val;
729 
730 	rtw_write32_set(rtwdev, REG_DCKA_I_0, BIT(19));
731 	val = dm_info->dack_dck[RF_PATH_A][0][0];
732 	rtw_write32_mask(rtwdev, REG_DCKA_I_0, 0xf0000000, val);
733 	val = dm_info->dack_dck[RF_PATH_A][0][1];
734 	rtw_write32_mask(rtwdev, REG_DCKA_I_1, 0xf, val);
735 
736 	rtw_write32_set(rtwdev, REG_DCKA_Q_0, BIT(19));
737 	val = dm_info->dack_dck[RF_PATH_A][1][0];
738 	rtw_write32_mask(rtwdev, REG_DCKA_Q_0, 0xf0000000, val);
739 	val = dm_info->dack_dck[RF_PATH_A][1][1];
740 	rtw_write32_mask(rtwdev, REG_DCKA_Q_1, 0xf, val);
741 
742 	rtw_write32_set(rtwdev, REG_DCKB_I_0, BIT(19));
743 	val = dm_info->dack_dck[RF_PATH_B][0][0];
744 	rtw_write32_mask(rtwdev, REG_DCKB_I_0, 0xf0000000, val);
745 	val = dm_info->dack_dck[RF_PATH_B][0][1];
746 	rtw_write32_mask(rtwdev, REG_DCKB_I_1, 0xf, val);
747 
748 	rtw_write32_set(rtwdev, REG_DCKB_Q_0, BIT(19));
749 	val = dm_info->dack_dck[RF_PATH_B][1][0];
750 	rtw_write32_mask(rtwdev, REG_DCKB_Q_0, 0xf0000000, val);
751 	val = dm_info->dack_dck[RF_PATH_B][1][1];
752 	rtw_write32_mask(rtwdev, REG_DCKB_Q_1, 0xf, val);
753 }
754 
755 static void rtw8822c_dac_cal_restore_prepare(struct rtw_dev *rtwdev)
756 {
757 	rtw_write32(rtwdev, 0x9b4, 0xdb66db00);
758 
759 	rtw_write32_mask(rtwdev, 0x18b0, BIT(27), 0x0);
760 	rtw_write32_mask(rtwdev, 0x18cc, BIT(27), 0x0);
761 	rtw_write32_mask(rtwdev, 0x41b0, BIT(27), 0x0);
762 	rtw_write32_mask(rtwdev, 0x41cc, BIT(27), 0x0);
763 
764 	rtw_write32_mask(rtwdev, 0x1830, BIT(30), 0x0);
765 	rtw_write32_mask(rtwdev, 0x1860, 0xfc000000, 0x3c);
766 	rtw_write32_mask(rtwdev, 0x18b4, BIT(0), 0x1);
767 	rtw_write32_mask(rtwdev, 0x18d0, BIT(0), 0x1);
768 
769 	rtw_write32_mask(rtwdev, 0x4130, BIT(30), 0x0);
770 	rtw_write32_mask(rtwdev, 0x4160, 0xfc000000, 0x3c);
771 	rtw_write32_mask(rtwdev, 0x41b4, BIT(0), 0x1);
772 	rtw_write32_mask(rtwdev, 0x41d0, BIT(0), 0x1);
773 
774 	rtw_write32_mask(rtwdev, 0x18b0, 0xf00, 0x0);
775 	rtw_write32_mask(rtwdev, 0x18c0, BIT(14), 0x0);
776 	rtw_write32_mask(rtwdev, 0x18cc, 0xf00, 0x0);
777 	rtw_write32_mask(rtwdev, 0x18dc, BIT(14), 0x0);
778 
779 	rtw_write32_mask(rtwdev, 0x18b0, BIT(0), 0x0);
780 	rtw_write32_mask(rtwdev, 0x18cc, BIT(0), 0x0);
781 	rtw_write32_mask(rtwdev, 0x18b0, BIT(0), 0x1);
782 	rtw_write32_mask(rtwdev, 0x18cc, BIT(0), 0x1);
783 
784 	rtw8822c_dac_cal_restore_dck(rtwdev);
785 
786 	rtw_write32_mask(rtwdev, 0x18c0, 0x38000, 0x7);
787 	rtw_write32_mask(rtwdev, 0x18dc, 0x38000, 0x7);
788 	rtw_write32_mask(rtwdev, 0x41c0, 0x38000, 0x7);
789 	rtw_write32_mask(rtwdev, 0x41dc, 0x38000, 0x7);
790 
791 	rtw_write32_mask(rtwdev, 0x18b8, BIT(26) | BIT(25), 0x1);
792 	rtw_write32_mask(rtwdev, 0x18d4, BIT(26) | BIT(25), 0x1);
793 
794 	rtw_write32_mask(rtwdev, 0x41b0, 0xf00, 0x0);
795 	rtw_write32_mask(rtwdev, 0x41c0, BIT(14), 0x0);
796 	rtw_write32_mask(rtwdev, 0x41cc, 0xf00, 0x0);
797 	rtw_write32_mask(rtwdev, 0x41dc, BIT(14), 0x0);
798 
799 	rtw_write32_mask(rtwdev, 0x41b0, BIT(0), 0x0);
800 	rtw_write32_mask(rtwdev, 0x41cc, BIT(0), 0x0);
801 	rtw_write32_mask(rtwdev, 0x41b0, BIT(0), 0x1);
802 	rtw_write32_mask(rtwdev, 0x41cc, BIT(0), 0x1);
803 
804 	rtw_write32_mask(rtwdev, 0x41b8, BIT(26) | BIT(25), 0x1);
805 	rtw_write32_mask(rtwdev, 0x41d4, BIT(26) | BIT(25), 0x1);
806 }
807 
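/* Toggle BIT(26)/BIT(25) of toggle_addr and poll until the low nibble of
 * target_addr reads 0x6, giving up after 100 iterations.
 */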
808 static bool rtw8822c_dac_cal_restore_wait(struct rtw_dev *rtwdev,
809 					  u32 target_addr, u32 toggle_addr)
810 {
811 	u32 cnt = 0;
812 
813 	do {
814 		rtw_write32_mask(rtwdev, toggle_addr, BIT(26) | BIT(25), 0x0);
815 		rtw_write32_mask(rtwdev, toggle_addr, BIT(26) | BIT(25), 0x2);
816 
817 		if (rtw_read32_mask(rtwdev, target_addr, 0xf) == 0x6)
818 			return true;
819 
820 	} while (cnt++ < 100);
821 
822 	return false;
823 }
824 
825 static bool rtw8822c_dac_cal_restore_path(struct rtw_dev *rtwdev, u8 path)
826 {
827 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
828 	u32 w_off = 0x1c;
829 	u32 r_off = 0x2c;
830 	u32 w_i, r_i, w_q, r_q;
831 	u32 value;
832 	u32 i;
833 
834 	w_i = rtw8822c_get_path_write_addr(path) + 0xb0;
835 	r_i = rtw8822c_get_path_read_addr(path) + 0x08;
836 	w_q = rtw8822c_get_path_write_addr(path) + 0xb0 + w_off;
837 	r_q = rtw8822c_get_path_read_addr(path) + 0x08 + r_off;
838 
839 	if (!rtw8822c_dac_cal_restore_wait(rtwdev, r_i, w_i + 0x8))
840 		return false;
841 
842 	for (i = 0; i < DACK_MSBK_BACKUP_NUM; i++) {
843 		rtw_write32_mask(rtwdev, w_i + 0x4, BIT(2), 0x0);
844 		value = dm_info->dack_msbk[path][0][i];
845 		rtw_write32_mask(rtwdev, w_i + 0x4, 0xff8, value);
846 		rtw_write32_mask(rtwdev, w_i, 0xf0000000, i);
847 		rtw_write32_mask(rtwdev, w_i + 0x4, BIT(2), 0x1);
848 	}
849 
850 	rtw_write32_mask(rtwdev, w_i + 0x4, BIT(2), 0x0);
851 
852 	if (!rtw8822c_dac_cal_restore_wait(rtwdev, r_q, w_q + 0x8))
853 		return false;
854 
855 	for (i = 0; i < DACK_MSBK_BACKUP_NUM; i++) {
856 		rtw_write32_mask(rtwdev, w_q + 0x4, BIT(2), 0x0);
857 		value = dm_info->dack_msbk[path][1][i];
858 		rtw_write32_mask(rtwdev, w_q + 0x4, 0xff8, value);
859 		rtw_write32_mask(rtwdev, w_q, 0xf0000000, i);
860 		rtw_write32_mask(rtwdev, w_q + 0x4, BIT(2), 0x1);
861 	}
862 	rtw_write32_mask(rtwdev, w_q + 0x4, BIT(2), 0x0);
863 
864 	rtw_write32_mask(rtwdev, w_i + 0x8, BIT(26) | BIT(25), 0x0);
865 	rtw_write32_mask(rtwdev, w_q + 0x8, BIT(26) | BIT(25), 0x0);
866 	rtw_write32_mask(rtwdev, w_i + 0x4, BIT(0), 0x0);
867 	rtw_write32_mask(rtwdev, w_q + 0x4, BIT(0), 0x0);
868 
869 	return true;
870 }
871 
872 static bool __rtw8822c_dac_cal_restore(struct rtw_dev *rtwdev)
873 {
874 	if (!rtw8822c_dac_cal_restore_path(rtwdev, RF_PATH_A))
875 		return false;
876 
877 	if (!rtw8822c_dac_cal_restore_path(rtwdev, RF_PATH_B))
878 		return false;
879 
880 	return true;
881 }
882 
883 static bool rtw8822c_dac_cal_restore(struct rtw_dev *rtwdev)
884 {
885 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
886 	u32 temp[3];
887 
888 	/* sample the first element of both paths' I/Q vectors */
889 	if (dm_info->dack_msbk[RF_PATH_A][0][0] == 0 &&
890 	    dm_info->dack_msbk[RF_PATH_A][1][0] == 0 &&
891 	    dm_info->dack_msbk[RF_PATH_B][0][0] == 0 &&
892 	    dm_info->dack_msbk[RF_PATH_B][1][0] == 0)
893 		return false;
894 
895 	temp[0] = rtw_read32(rtwdev, 0x1860);
896 	temp[1] = rtw_read32(rtwdev, 0x4160);
897 	temp[2] = rtw_read32(rtwdev, 0x9b4);
898 
899 	rtw8822c_dac_cal_restore_prepare(rtwdev);
900 	if (!check_hw_ready(rtwdev, 0x2808, 0x7fff80, 0xffff) ||
901 	    !check_hw_ready(rtwdev, 0x2834, 0x7fff80, 0xffff) ||
902 	    !check_hw_ready(rtwdev, 0x4508, 0x7fff80, 0xffff) ||
903 	    !check_hw_ready(rtwdev, 0x4534, 0x7fff80, 0xffff))
904 		return false;
905 
906 	if (!__rtw8822c_dac_cal_restore(rtwdev)) {
907 		rtw_err(rtwdev, "failed to restore dack vectors\n");
908 		return false;
909 	}
910 
911 	rtw_write32_mask(rtwdev, 0x1830, BIT(30), 0x1);
912 	rtw_write32_mask(rtwdev, 0x4130, BIT(30), 0x1);
913 	rtw_write32(rtwdev, 0x1860, temp[0]);
914 	rtw_write32(rtwdev, 0x4160, temp[1]);
915 	rtw_write32_mask(rtwdev, 0x18b0, BIT(27), 0x1);
916 	rtw_write32_mask(rtwdev, 0x18cc, BIT(27), 0x1);
917 	rtw_write32_mask(rtwdev, 0x41b0, BIT(27), 0x1);
918 	rtw_write32_mask(rtwdev, 0x41cc, BIT(27), 0x1);
919 	rtw_write32(rtwdev, 0x9b4, temp[2]);
920 
921 	return true;
922 }
923 
924 static void rtw8822c_rf_dac_cal(struct rtw_dev *rtwdev)
925 {
926 	struct rtw_backup_info backup_rf[DACK_RF_8822C * DACK_PATH_8822C];
927 	struct rtw_backup_info backup[DACK_REG_8822C];
928 	u32 ic = 0, qc = 0, i;
929 	u32 i_a = 0x0, q_a = 0x0, i_b = 0x0, q_b = 0x0;
930 	u32 ic_a = 0x0, qc_a = 0x0, ic_b = 0x0, qc_b = 0x0;
931 	u32 adc_ic_a = 0x0, adc_qc_a = 0x0, adc_ic_b = 0x0, adc_qc_b = 0x0;
932 
933 	if (rtw8822c_dac_cal_restore(rtwdev))
934 		return;
935 
936 	/* unable to restore previous results, run a full calibration */
937 
938 	rtw8822c_dac_backup_reg(rtwdev, backup, backup_rf);
939 
940 	rtw8822c_dac_bb_setting(rtwdev);
941 
942 	/* path-A */
943 	rtw8822c_dac_cal_adc(rtwdev, RF_PATH_A, &adc_ic_a, &adc_qc_a);
944 	for (i = 0; i < 10; i++) {
945 		rtw8822c_dac_cal_step1(rtwdev, RF_PATH_A);
946 		rtw8822c_dac_cal_step2(rtwdev, RF_PATH_A, &ic, &qc);
947 		ic_a = ic;
948 		qc_a = qc;
949 
950 		rtw8822c_dac_cal_step3(rtwdev, RF_PATH_A, adc_ic_a, adc_qc_a,
951 				       &ic, &qc, &i_a, &q_a);
952 
953 		if (ic < 5 && qc < 5)
954 			break;
955 	}
956 	rtw8822c_dac_cal_step4(rtwdev, RF_PATH_A);
957 
958 	/* path-B */
959 	rtw8822c_dac_cal_adc(rtwdev, RF_PATH_B, &adc_ic_b, &adc_qc_b);
960 	for (i = 0; i < 10; i++) {
961 		rtw8822c_dac_cal_step1(rtwdev, RF_PATH_B);
962 		rtw8822c_dac_cal_step2(rtwdev, RF_PATH_B, &ic, &qc);
963 		ic_b = ic;
964 		qc_b = qc;
965 
966 		rtw8822c_dac_cal_step3(rtwdev, RF_PATH_B, adc_ic_b, adc_qc_b,
967 				       &ic, &qc, &i_b, &q_b);
968 
969 		if (ic < 5 && qc < 5)
970 			break;
971 	}
972 	rtw8822c_dac_cal_step4(rtwdev, RF_PATH_B);
973 
974 	rtw_write32(rtwdev, 0x1b00, 0x00000008);
975 	rtw_write32_mask(rtwdev, 0x4130, BIT(30), 0x1);
976 	rtw_write8(rtwdev, 0x1bcc, 0x0);
977 	rtw_write32(rtwdev, 0x1b00, 0x0000000a);
978 	rtw_write8(rtwdev, 0x1bcc, 0x0);
979 
980 	rtw8822c_dac_restore_reg(rtwdev, backup, backup_rf);
981 
982 	/* back up the results so they can be restored later, saving a lot of time */
983 	rtw8822c_dac_cal_backup(rtwdev);
984 
985 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path A: ic=0x%x, qc=0x%x\n", ic_a, qc_a);
986 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path B: ic=0x%x, qc=0x%x\n", ic_b, qc_b);
987 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path A: i=0x%x, q=0x%x\n", i_a, q_a);
988 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path B: i=0x%x, q=0x%x\n", i_b, q_b);
989 }
990 
991 static void rtw8822c_rf_x2_check(struct rtw_dev *rtwdev)
992 {
993 	u8 x2k_busy;
994 
995 	mdelay(1);
996 	x2k_busy = rtw_read_rf(rtwdev, RF_PATH_A, 0xb8, BIT(15));
997 	if (x2k_busy == 1) {
998 		rtw_write_rf(rtwdev, RF_PATH_A, 0xb8, RFREG_MASK, 0xC4440);
999 		rtw_write_rf(rtwdev, RF_PATH_A, 0xba, RFREG_MASK, 0x6840D);
1000 		rtw_write_rf(rtwdev, RF_PATH_A, 0xb8, RFREG_MASK, 0x80440);
1001 		mdelay(1);
1002 	}
1003 }
1004 
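/* Program the per-index power-trim values from efuse: each write pairs an
 * index in RF reg 0x33 with its value in RF reg 0x3f, bracketed by toggling
 * RF reg 0xee BIT(19).
 */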
1005 static void rtw8822c_set_power_trim(struct rtw_dev *rtwdev, s8 bb_gain[2][8])
1006 {
1007 #define RF_SET_POWER_TRIM(_path, _seq, _idx)					\
1008 		do {								\
1009 			rtw_write_rf(rtwdev, _path, 0x33, RFREG_MASK, _seq);	\
1010 			rtw_write_rf(rtwdev, _path, 0x3f, RFREG_MASK,		\
1011 				     bb_gain[_path][_idx]);			\
1012 		} while (0)
1013 	u8 path;
1014 
1015 	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1016 		rtw_write_rf(rtwdev, path, 0xee, BIT(19), 1);
1017 		RF_SET_POWER_TRIM(path, 0x0, 0);
1018 		RF_SET_POWER_TRIM(path, 0x1, 1);
1019 		RF_SET_POWER_TRIM(path, 0x2, 2);
1020 		RF_SET_POWER_TRIM(path, 0x3, 2);
1021 		RF_SET_POWER_TRIM(path, 0x4, 3);
1022 		RF_SET_POWER_TRIM(path, 0x5, 4);
1023 		RF_SET_POWER_TRIM(path, 0x6, 5);
1024 		RF_SET_POWER_TRIM(path, 0x7, 6);
1025 		RF_SET_POWER_TRIM(path, 0x8, 7);
1026 		RF_SET_POWER_TRIM(path, 0x9, 3);
1027 		RF_SET_POWER_TRIM(path, 0xa, 4);
1028 		RF_SET_POWER_TRIM(path, 0xb, 5);
1029 		RF_SET_POWER_TRIM(path, 0xc, 6);
1030 		RF_SET_POWER_TRIM(path, 0xd, 7);
1031 		RF_SET_POWER_TRIM(path, 0xe, 7);
1032 		rtw_write_rf(rtwdev, path, 0xee, BIT(19), 0);
1033 	}
1034 #undef RF_SET_POWER_TRIM
1035 }
1036 
1037 static void rtw8822c_power_trim(struct rtw_dev *rtwdev)
1038 {
1039 	u8 pg_pwr = 0xff, i, path, idx;
1040 	s8 bb_gain[2][8] = {};
1041 	u16 rf_efuse_2g[3] = {PPG_2GL_TXAB, PPG_2GM_TXAB, PPG_2GH_TXAB};
1042 	u16 rf_efuse_5g[2][5] = {{PPG_5GL1_TXA, PPG_5GL2_TXA, PPG_5GM1_TXA,
1043 				  PPG_5GM2_TXA, PPG_5GH1_TXA},
1044 				 {PPG_5GL1_TXB, PPG_5GL2_TXB, PPG_5GM1_TXB,
1045 				  PPG_5GM2_TXB, PPG_5GH1_TXB} };
1046 	bool set = false;
1047 
1048 	for (i = 0; i < ARRAY_SIZE(rf_efuse_2g); i++) {
1049 		rtw_read8_physical_efuse(rtwdev, rf_efuse_2g[i], &pg_pwr);
1050 		if (pg_pwr == EFUSE_READ_FAIL)
1051 			continue;
1052 		set = true;
1053 		bb_gain[RF_PATH_A][i] = FIELD_GET(PPG_2G_A_MASK, pg_pwr);
1054 		bb_gain[RF_PATH_B][i] = FIELD_GET(PPG_2G_B_MASK, pg_pwr);
1055 	}
1056 
1057 	for (i = 0; i < ARRAY_SIZE(rf_efuse_5g[0]); i++) {
1058 		for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1059 			rtw_read8_physical_efuse(rtwdev, rf_efuse_5g[path][i],
1060 						 &pg_pwr);
1061 			if (pg_pwr == EFUSE_READ_FAIL)
1062 				continue;
1063 			set = true;
1064 			idx = i + ARRAY_SIZE(rf_efuse_2g);
1065 			bb_gain[path][idx] = FIELD_GET(PPG_5G_MASK, pg_pwr);
1066 		}
1067 	}
1068 	if (set)
1069 		rtw8822c_set_power_trim(rtwdev, bb_gain);
1070 
1071 	rtw_write32_mask(rtwdev, REG_DIS_DPD, DIS_DPD_MASK, DIS_DPD_RATEALL);
1072 }
1073 
1074 static void rtw8822c_thermal_trim(struct rtw_dev *rtwdev)
1075 {
1076 	u16 rf_efuse[2] = {PPG_THERMAL_A, PPG_THERMAL_B};
1077 	u8 pg_therm = 0xff, thermal[2] = {0}, path;
1078 
1079 	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1080 		rtw_read8_physical_efuse(rtwdev, rf_efuse[path], &pg_therm);
1081 		if (pg_therm == EFUSE_READ_FAIL)
1082 			return;
1083 		/* The efuse value of BIT(0) shall be moved to BIT(3), and the
1084 		 * values of BIT(1) to BIT(3) should be right-shifted by 1 bit.
1085 		 */
1086 		thermal[path] = FIELD_GET(GENMASK(3, 1), pg_therm);
1087 		thermal[path] |= FIELD_PREP(BIT(3), pg_therm & BIT(0));
1088 		rtw_write_rf(rtwdev, path, 0x43, RF_THEMAL_MASK, thermal[path]);
1089 	}
1090 }
1091 
1092 static void rtw8822c_pa_bias(struct rtw_dev *rtwdev)
1093 {
1094 	u16 rf_efuse_2g[2] = {PPG_PABIAS_2GA, PPG_PABIAS_2GB};
1095 	u16 rf_efuse_5g[2] = {PPG_PABIAS_5GA, PPG_PABIAS_5GB};
1096 	u8 pg_pa_bias = 0xff, path;
1097 
1098 	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1099 		rtw_read8_physical_efuse(rtwdev, rf_efuse_2g[path],
1100 					 &pg_pa_bias);
1101 		if (pg_pa_bias == EFUSE_READ_FAIL)
1102 			return;
1103 		pg_pa_bias = FIELD_GET(PPG_PABIAS_MASK, pg_pa_bias);
1104 		rtw_write_rf(rtwdev, path, RF_PA, RF_PABIAS_2G_MASK, pg_pa_bias);
1105 	}
1106 	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1107 		rtw_read8_physical_efuse(rtwdev, rf_efuse_5g[path],
1108 					 &pg_pa_bias);
1109 		pg_pa_bias = FIELD_GET(PPG_PABIAS_MASK, pg_pa_bias);
1110 		rtw_write_rf(rtwdev, path, RF_PA, RF_PABIAS_5G_MASK, pg_pa_bias);
1111 	}
1112 }
1113 
1114 static void rtw8822c_rfk_handshake(struct rtw_dev *rtwdev, bool is_before_k)
1115 {
1116 	struct rtw_dm_info *dm = &rtwdev->dm_info;
1117 	u8 u1b_tmp;
1118 	u8 u4b_tmp;
1119 	int ret;
1120 
1121 	if (is_before_k) {
1122 		rtw_dbg(rtwdev, RTW_DBG_RFK,
1123 			"[RFK] WiFi / BT RFK handshake start!!\n");
1124 
1125 		if (!dm->is_bt_iqk_timeout) {
1126 			ret = read_poll_timeout(rtw_read32_mask, u4b_tmp,
1127 						u4b_tmp == 0, 20, 600000, false,
1128 						rtwdev, REG_PMC_DBG_CTRL1,
1129 						BITS_PMC_BT_IQK_STS);
1130 			if (ret) {
1131 				rtw_dbg(rtwdev, RTW_DBG_RFK,
1132 					"[RFK] Wait BT IQK finish timeout!!\n");
1133 				dm->is_bt_iqk_timeout = true;
1134 			}
1135 		}
1136 
1137 		rtw_fw_inform_rfk_status(rtwdev, true);
1138 
1139 		ret = read_poll_timeout(rtw_read8_mask, u1b_tmp,
1140 					u1b_tmp == 1, 20, 100000, false,
1141 					rtwdev, REG_ARFR4, BIT_WL_RFK);
1142 		if (ret)
1143 			rtw_dbg(rtwdev, RTW_DBG_RFK,
1144 				"[RFK] Send WiFi RFK start H2C cmd FAIL!!\n");
1145 	} else {
1146 		rtw_fw_inform_rfk_status(rtwdev, false);
1147 		ret = read_poll_timeout(rtw_read8_mask, u1b_tmp,
1148 					u1b_tmp == 1, 20, 100000, false,
1149 					rtwdev, REG_ARFR4,
1150 					BIT_WL_RFK);
1151 		if (ret)
1152 			rtw_dbg(rtwdev, RTW_DBG_RFK,
1153 				"[RFK] Send WiFi RFK finish H2C cmd FAIL!!\n");
1154 
1155 		rtw_dbg(rtwdev, RTW_DBG_RFK,
1156 			"[RFK] WiFi / BT RFK handshake finish!!\n");
1157 	}
1158 }
1159 
1160 static void rtw8822c_rfk_power_save(struct rtw_dev *rtwdev,
1161 				    bool is_power_save)
1162 {
1163 	u8 path;
1164 
1165 	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1166 		rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1167 		rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, BIT_PS_EN,
1168 				 is_power_save ? 0 : 1);
1169 	}
1170 }
1171 
1172 static void rtw8822c_txgapk_backup_bb_reg(struct rtw_dev *rtwdev, const u32 reg[],
1173 					  u32 reg_backup[], u32 reg_num)
1174 {
1175 	u32 i;
1176 
1177 	for (i = 0; i < reg_num; i++) {
1178 		reg_backup[i] = rtw_read32(rtwdev, reg[i]);
1179 
1180 		rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] Backup BB 0x%x = 0x%x\n",
1181 			reg[i], reg_backup[i]);
1182 	}
1183 }
1184 
1185 static void rtw8822c_txgapk_reload_bb_reg(struct rtw_dev *rtwdev,
1186 					  const u32 reg[], u32 reg_backup[],
1187 					  u32 reg_num)
1188 {
1189 	u32 i;
1190 
1191 	for (i = 0; i < reg_num; i++) {
1192 		rtw_write32(rtwdev, reg[i], reg_backup[i]);
1193 		rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] Reload BB 0x%x = 0x%x\n",
1194 			reg[i], reg_backup[i]);
1195 	}
1196 }
1197 
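/* Return true only when neither RF path's RF_MODE_TRXAGC mode field matches
 * the given status.
 */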
1198 static bool check_rf_status(struct rtw_dev *rtwdev, u8 status)
1199 {
1200 	u8 reg_rf0_a, reg_rf0_b;
1201 
1202 	reg_rf0_a = (u8)rtw_read_rf(rtwdev, RF_PATH_A,
1203 				    RF_MODE_TRXAGC, BIT_RF_MODE);
1204 	reg_rf0_b = (u8)rtw_read_rf(rtwdev, RF_PATH_B,
1205 				    RF_MODE_TRXAGC, BIT_RF_MODE);
1206 
1207 	if (reg_rf0_a == status || reg_rf0_b == status)
1208 		return false;
1209 
1210 	return true;
1211 }
1212 
1213 static void rtw8822c_txgapk_tx_pause(struct rtw_dev *rtwdev)
1214 {
1215 	bool status;
1216 	int ret;
1217 
1218 	rtw_write8(rtwdev, REG_TXPAUSE, BIT_AC_QUEUE);
1219 	rtw_write32_mask(rtwdev, REG_TX_FIFO, BIT_STOP_TX, 0x2);
1220 
1221 	ret = read_poll_timeout_atomic(check_rf_status, status, status,
1222 				       2, 5000, false, rtwdev, 2);
1223 	if (ret)
1224 		rtw_warn(rtwdev, "failed to pause TX\n");
1225 
1226 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] Tx pause!!\n");
1227 }
1228 
1229 static void rtw8822c_txgapk_bb_dpk(struct rtw_dev *rtwdev, u8 path)
1230 {
1231 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1232 
1233 	rtw_write32_mask(rtwdev, REG_ENFN, BIT_IQK_DPK_EN, 0x1);
1234 	rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2,
1235 			 BIT_IQK_DPK_CLOCK_SRC, 0x1);
1236 	rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2,
1237 			 BIT_IQK_DPK_RESET_SRC, 0x1);
1238 	rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2, BIT_EN_IOQ_IQK_DPK, 0x1);
1239 	rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2, BIT_TST_IQK2SET_SRC, 0x0);
1240 	rtw_write32_mask(rtwdev, REG_CCA_OFF, BIT_CCA_ON_BY_PW, 0x1ff);
1241 
1242 	if (path == RF_PATH_A) {
1243 		rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_A,
1244 				 BIT_RFTXEN_GCK_FORCE_ON, 0x1);
1245 		rtw_write32_mask(rtwdev, REG_3WIRE, BIT_DIS_SHARERX_TXGAT, 0x1);
1246 		rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_A,
1247 				 BIT_TX_SCALE_0DB, 0x1);
1248 		rtw_write32_mask(rtwdev, REG_3WIRE, BIT_3WIRE_EN, 0x0);
1249 	} else if (path == RF_PATH_B) {
1250 		rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_B,
1251 				 BIT_RFTXEN_GCK_FORCE_ON, 0x1);
1252 		rtw_write32_mask(rtwdev, REG_3WIRE2,
1253 				 BIT_DIS_SHARERX_TXGAT, 0x1);
1254 		rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_B,
1255 				 BIT_TX_SCALE_0DB, 0x1);
1256 		rtw_write32_mask(rtwdev, REG_3WIRE2, BIT_3WIRE_EN, 0x0);
1257 	}
1258 	rtw_write32_mask(rtwdev, REG_CCKSB, BIT_BBMODE, 0x2);
1259 }
1260 
1261 static void rtw8822c_txgapk_afe_dpk(struct rtw_dev *rtwdev, u8 path)
1262 {
1263 	u32 reg;
1264 
1265 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1266 
1267 	if (path == RF_PATH_A) {
1268 		reg = REG_ANAPAR_A;
1269 	} else if (path == RF_PATH_B) {
1270 		reg = REG_ANAPAR_B;
1271 	} else {
1272 		rtw_err(rtwdev, "[TXGAPK] unknown path %d!!\n", path);
1273 		return;
1274 	}
1275 
1276 	rtw_write32_mask(rtwdev, REG_IQK_CTRL, MASKDWORD, MASKDWORD);
1277 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x700f0001);
1278 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x700f0001);
1279 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x701f0001);
1280 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x702f0001);
1281 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x703f0001);
1282 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x704f0001);
1283 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x705f0001);
1284 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x706f0001);
1285 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x707f0001);
1286 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x708f0001);
1287 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x709f0001);
1288 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70af0001);
1289 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70bf0001);
1290 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70cf0001);
1291 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70df0001);
1292 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ef0001);
1293 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ff0001);
1294 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ff0001);
1295 }
1296 
1297 static void rtw8822c_txgapk_afe_dpk_restore(struct rtw_dev *rtwdev, u8 path)
1298 {
1299 	u32 reg;
1300 
1301 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1302 
1303 	if (path == RF_PATH_A) {
1304 		reg = REG_ANAPAR_A;
1305 	} else if (path == RF_PATH_B) {
1306 		reg = REG_ANAPAR_B;
1307 	} else {
1308 		rtw_err(rtwdev, "[TXGAPK] unknown path %d!!\n", path);
1309 		return;
1310 	}
1311 	rtw_write32_mask(rtwdev, REG_IQK_CTRL, MASKDWORD, 0xffa1005e);
1312 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x700b8041);
1313 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70144041);
1314 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70244041);
1315 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70344041);
1316 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70444041);
1317 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x705b8041);
1318 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70644041);
1319 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x707b8041);
1320 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x708b8041);
1321 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x709b8041);
1322 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ab8041);
1323 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70bb8041);
1324 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70cb8041);
1325 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70db8041);
1326 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70eb8041);
1327 	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70fb8041);
1328 }
1329 
1330 static void rtw8822c_txgapk_bb_dpk_restore(struct rtw_dev *rtwdev, u8 path)
1331 {
1332 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1333 
1334 	rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x0);
1335 	rtw_write_rf(rtwdev, path, RF_DIS_BYPASS_TXBB, BIT_TIA_BYPASS, 0x0);
1336 	rtw_write_rf(rtwdev, path, RF_DIS_BYPASS_TXBB, BIT_TXBB, 0x0);
1337 
1338 	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, 0x0);
1339 	rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
1340 	rtw_write32_mask(rtwdev, REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
1341 	rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, 0x00);
1342 	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, 0x1);
1343 	rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
1344 	rtw_write32_mask(rtwdev, REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
1345 	rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, 0x00);
1346 	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, 0x0);
1347 	rtw_write32_mask(rtwdev, REG_CCA_OFF, BIT_CCA_ON_BY_PW, 0x0);
1348 
1349 	if (path == RF_PATH_A) {
1350 		rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_A,
1351 				 BIT_RFTXEN_GCK_FORCE_ON, 0x0);
1352 		rtw_write32_mask(rtwdev, REG_3WIRE, BIT_DIS_SHARERX_TXGAT, 0x0);
1353 		rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_A,
1354 				 BIT_TX_SCALE_0DB, 0x0);
1355 		rtw_write32_mask(rtwdev, REG_3WIRE, BIT_3WIRE_EN, 0x3);
1356 	} else if (path == RF_PATH_B) {
1357 		rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_B,
1358 				 BIT_RFTXEN_GCK_FORCE_ON, 0x0);
1359 		rtw_write32_mask(rtwdev, REG_3WIRE2,
1360 				 BIT_DIS_SHARERX_TXGAT, 0x0);
1361 		rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_B,
1362 				 BIT_TX_SCALE_0DB, 0x0);
1363 		rtw_write32_mask(rtwdev, REG_3WIRE2, BIT_3WIRE_EN, 0x3);
1364 	}
1365 
1366 	rtw_write32_mask(rtwdev, REG_CCKSB, BIT_BBMODE, 0x0);
1367 	rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_CFIR_EN, 0x5);
1368 }
1369 
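/* A gain word is considered valid when its TX PAD high field is at least 0xc
 * and its TX PAD low field is at least 0xe, matching the "0xCEX" threshold
 * in the debug output below.
 */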
1370 static bool _rtw8822c_txgapk_gain_valid(struct rtw_dev *rtwdev, u32 gain)
1371 {
1372 	if ((FIELD_GET(BIT_GAIN_TX_PAD_H, gain) >= 0xc) &&
1373 	    (FIELD_GET(BIT_GAIN_TX_PAD_L, gain) >= 0xe))
1374 		return true;
1375 
1376 	return false;
1377 }
1378 
1379 static void _rtw8822c_txgapk_write_gain_bb_table(struct rtw_dev *rtwdev,
1380 						 u8 band, u8 path)
1381 {
1382 	struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1383 	u32 v, tmp_3f = 0;
1384 	u8 gain, check_txgain;
1385 
1386 	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1387 
1388 	switch (band) {
1389 	case RF_BAND_2G_OFDM:
1390 		rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x0);
1391 		break;
1392 	case RF_BAND_5G_L:
1393 		rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x2);
1394 		break;
1395 	case RF_BAND_5G_M:
1396 		rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x3);
1397 		break;
1398 	case RF_BAND_5G_H:
1399 		rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x4);
1400 		break;
1401 	default:
1402 		break;
1403 	}
1404 
1405 	rtw_write32_mask(rtwdev, REG_TX_GAIN_SET, MASKBYTE0, 0x88);
1406 
1407 	check_txgain = 0;
1408 	for (gain = 0; gain < RF_GAIN_NUM; gain++) {
1409 		v = txgapk->rf3f_bp[band][gain][path];
1410 		if (_rtw8822c_txgapk_gain_valid(rtwdev, v)) {
1411 			if (!check_txgain) {
1412 				tmp_3f = txgapk->rf3f_bp[band][gain][path];
1413 				check_txgain = 1;
1414 			}
1415 			rtw_dbg(rtwdev, RTW_DBG_RFK,
1416 				"[TXGAPK] tx_gain=0x%03X >= 0xCEX\n",
1417 				txgapk->rf3f_bp[band][gain][path]);
1418 		} else {
1419 			tmp_3f = txgapk->rf3f_bp[band][gain][path];
1420 		}
1421 
1422 		rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN, tmp_3f);
1423 		rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_I_GAIN, gain);
1424 		rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_GAIN_RST, 0x1);
1425 		rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_GAIN_RST, 0x0);
1426 
1427 		rtw_dbg(rtwdev, RTW_DBG_RFK,
1428 			"[TXGAPK] Band=%d 0x1b98[11:0]=0x%03X path=%d\n",
1429 			band, tmp_3f, path);
1430 	}
1431 }
1432 
1433 static void rtw8822c_txgapk_write_gain_bb_table(struct rtw_dev *rtwdev)
1434 {
1435 	u8 path, band;
1436 
1437 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s channel=%d\n",
1438 		__func__, rtwdev->dm_info.gapk.channel);
1439 
1440 	for (band = 0; band < RF_BAND_MAX; band++) {
1441 		for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1442 			_rtw8822c_txgapk_write_gain_bb_table(rtwdev,
1443 							     band, path);
1444 		}
1445 	}
1446 }
1447 
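/* Run a single-tone measurement on the selected path, wait for the report
 * engine to signal 0x55, then read the per-gain offsets from REG_STAT_RPT
 * and sign-extend each 4-bit value to s8.
 */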
1448 static void rtw8822c_txgapk_read_offset(struct rtw_dev *rtwdev, u8 path)
1449 {
1450 	static const u32 cfg1_1b00[2] = {0x00000d18, 0x00000d2a};
1451 	static const u32 cfg2_1b00[2] = {0x00000d19, 0x00000d2b};
1452 	static const u32 set_pi[2] = {REG_RSV_CTRL, REG_WLRF1};
1453 	static const u32 path_setting[2] = {REG_ORITXCODE, REG_ORITXCODE2};
1454 	struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1455 	u8 channel = txgapk->channel;
1456 	u32 val;
1457 	int i;
1458 
1459 	if (path >= ARRAY_SIZE(cfg1_1b00) ||
1460 	    path >= ARRAY_SIZE(cfg2_1b00) ||
1461 	    path >= ARRAY_SIZE(set_pi) ||
1462 	    path >= ARRAY_SIZE(path_setting)) {
1463 		rtw_warn(rtwdev, "[TXGAPK] wrong path %d\n", path);
1464 		return;
1465 	}
1466 
1467 	rtw_write32_mask(rtwdev, REG_ANTMAP0, BIT_ANT_PATH, path + 1);
1468 	rtw_write32_mask(rtwdev, REG_TXLGMAP, MASKDWORD, 0xe4e40000);
1469 	rtw_write32_mask(rtwdev, REG_TXANTSEG, BIT_ANTSEG, 0x3);
1470 	rtw_write32_mask(rtwdev, path_setting[path], MASK20BITS, 0x33312);
1471 	rtw_write32_mask(rtwdev, path_setting[path], BIT_PATH_EN, 0x1);
1472 	rtw_write32_mask(rtwdev, set_pi[path], BITS_RFC_DIRECT, 0x0);
1473 	rtw_write_rf(rtwdev, path, RF_LUTDBG, BIT_TXA_TANK, 0x1);
1474 	rtw_write_rf(rtwdev, path, RF_IDAC, BIT_TX_MODE, 0x820);
1475 	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1476 	rtw_write32_mask(rtwdev, REG_IQKSTAT, MASKBYTE0, 0x0);
1477 
1478 	rtw_write32_mask(rtwdev, REG_TX_TONE_IDX, MASKBYTE0, 0x018);
1479 	fsleep(1000);
1480 	if (channel >= 1 && channel <= 14)
1481 		rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, BIT_2G_SWING);
1482 	else
1483 		rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, BIT_5G_SWING);
1484 	fsleep(1000);
1485 
1486 	rtw_write32_mask(rtwdev, REG_NCTL0, MASKDWORD, cfg1_1b00[path]);
1487 	rtw_write32_mask(rtwdev, REG_NCTL0, MASKDWORD, cfg2_1b00[path]);
1488 
1489 	read_poll_timeout(rtw_read32_mask, val,
1490 			  val == 0x55, 1000, 100000, false,
1491 			  rtwdev, REG_RPT_CIP, BIT_RPT_CIP_STATUS);
1492 
1493 	rtw_write32_mask(rtwdev, set_pi[path], BITS_RFC_DIRECT, 0x2);
1494 	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1495 	rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_EN, 0x1);
1496 	rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_SEL, 0x12);
1497 	rtw_write32_mask(rtwdev, REG_TX_GAIN_SET, BIT_GAPK_RPT_IDX, 0x3);
1498 	val = rtw_read32(rtwdev, REG_STAT_RPT);
1499 
1500 	txgapk->offset[0][path] = (s8)FIELD_GET(BIT_GAPK_RPT0, val);
1501 	txgapk->offset[1][path] = (s8)FIELD_GET(BIT_GAPK_RPT1, val);
1502 	txgapk->offset[2][path] = (s8)FIELD_GET(BIT_GAPK_RPT2, val);
1503 	txgapk->offset[3][path] = (s8)FIELD_GET(BIT_GAPK_RPT3, val);
1504 	txgapk->offset[4][path] = (s8)FIELD_GET(BIT_GAPK_RPT4, val);
1505 	txgapk->offset[5][path] = (s8)FIELD_GET(BIT_GAPK_RPT5, val);
1506 	txgapk->offset[6][path] = (s8)FIELD_GET(BIT_GAPK_RPT6, val);
1507 	txgapk->offset[7][path] = (s8)FIELD_GET(BIT_GAPK_RPT7, val);
1508 
1509 	rtw_write32_mask(rtwdev, REG_TX_GAIN_SET, BIT_GAPK_RPT_IDX, 0x4);
1510 	val = rtw_read32(rtwdev, REG_STAT_RPT);
1511 
1512 	txgapk->offset[8][path] = (s8)FIELD_GET(BIT_GAPK_RPT0, val);
1513 	txgapk->offset[9][path] = (s8)FIELD_GET(BIT_GAPK_RPT1, val);
1514 
1515 	for (i = 0; i < RF_HW_OFFSET_NUM; i++)
1516 		if (txgapk->offset[i][path] & BIT(3))
1517 			txgapk->offset[i][path] = txgapk->offset[i][path] |
1518 						  0xf0;
1519 	for (i = 0; i < RF_HW_OFFSET_NUM; i++)
1520 		rtw_dbg(rtwdev, RTW_DBG_RFK,
1521 			"[TXGAPK] offset %d %d path=%d\n",
1522 			txgapk->offset[i][path], i, path);
1523 }
1524 
1525 static void rtw8822c_txgapk_calculate_offset(struct rtw_dev *rtwdev, u8 path)
1526 {
1527 	static const u32 bb_reg[] = {REG_ANTMAP0, REG_TXLGMAP, REG_TXANTSEG,
1528 				     REG_ORITXCODE, REG_ORITXCODE2};
1529 	struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1530 	u8 channel = txgapk->channel;
1531 	u32 reg_backup[ARRAY_SIZE(bb_reg)] = {0};
1532 
1533 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s channel=%d\n",
1534 		__func__, channel);
1535 
1536 	rtw8822c_txgapk_backup_bb_reg(rtwdev, bb_reg,
1537 				      reg_backup, ARRAY_SIZE(bb_reg));
1538 
1539 	if (channel >= 1 && channel <= 14) {
1540 		rtw_write32_mask(rtwdev,
1541 				 REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
1542 		rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1543 		rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x3f);
1544 		rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
1545 		rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x1);
1546 		rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x5000f);
1547 		rtw_write_rf(rtwdev, path, RF_TX_GAIN_OFFSET, BIT_RF_GAIN, 0x0);
1548 		rtw_write_rf(rtwdev, path, RF_RXG_GAIN, BIT_RXG_GAIN, 0x1);
1549 		rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RXAGC, 0x0f);
1550 		rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x1);
1551 		rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_TXBB, 0x1);
1552 		rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_RXBB, 0x0);
1553 		rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT_PW_EXT_TIA, 0x1);
1554 
1555 		rtw_write32_mask(rtwdev, REG_IQKSTAT, MASKBYTE0, 0x00);
1556 		rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x0);
1557 
1558 		rtw8822c_txgapk_read_offset(rtwdev, path);
1559 		rtw_dbg(rtwdev, RTW_DBG_RFK, "=============================\n");
1560 
1561 	} else {
1562 		rtw_write32_mask(rtwdev,
1563 				 REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
1564 		rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1565 		rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x3f);
1566 		rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
1567 		rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x1);
1568 		rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x50011);
1569 		rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_TXA_LB_ATT, 0x3);
1570 		rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_ATT, 0x3);
1571 		rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_SW, 0x1);
1572 		rtw_write_rf(rtwdev, path,
1573 			     RF_RXA_MIX_GAIN, BIT_RXA_MIX_GAIN, 0x2);
1574 		rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RXAGC, 0x12);
1575 		rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x1);
1576 		rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_RXBB, 0x0);
1577 		rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT_PW_EXT_TIA, 0x1);
1578 		rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RF_MODE, 0x5);
1579 
1580 		rtw_write32_mask(rtwdev, REG_IQKSTAT, MASKBYTE0, 0x0);
1581 
1582 		if (channel >= 36 && channel <= 64)
1583 			rtw_write32_mask(rtwdev,
1584 					 REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x2);
1585 		else if (channel >= 100 && channel <= 144)
1586 			rtw_write32_mask(rtwdev,
1587 					 REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x3);
1588 		else if (channel >= 149 && channel <= 177)
1589 			rtw_write32_mask(rtwdev,
1590 					 REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x4);
1591 
1592 		rtw8822c_txgapk_read_offset(rtwdev, path);
1593 		rtw_dbg(rtwdev, RTW_DBG_RFK, "=============================\n");
1594 	}
1595 	rtw8822c_txgapk_reload_bb_reg(rtwdev, bb_reg,
1596 				      reg_backup, ARRAY_SIZE(bb_reg));
1597 }
1598 
1599 static void rtw8822c_txgapk_rf_restore(struct rtw_dev *rtwdev, u8 path)
1600 {
1601 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1602 
1603 	if (path >= rtwdev->hal.rf_path_num)
1604 		return;
1605 
1606 	rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RF_MODE, 0x3);
1607 	rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x0);
1608 	rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT_PW_EXT_TIA, 0x0);
1609 }
1610 
1611 static u32 rtw8822c_txgapk_cal_gain(struct rtw_dev *rtwdev, u32 gain, s8 offset)
1612 {
1613 	u32 gain_x2, new_gain;
1614 
1615 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1616 
1617 	if (_rtw8822c_txgapk_gain_valid(rtwdev, gain)) {
1618 		new_gain = gain;
1619 		rtw_dbg(rtwdev, RTW_DBG_RFK,
1620 			"[TXGAPK] gain=0x%03X(>=0xCEX) offset=%d new_gain=0x%03X\n",
1621 			gain, offset, new_gain);
1622 		return new_gain;
1623 	}
1624 
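	/* Work on twice the gain value so the signed offset is applied with
	 * half-step resolution; the leftover LSB selects the gain extension
	 * bit of the new gain word.
	 */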
1625 	gain_x2 = (gain << 1) + offset;
1626 	new_gain = (gain_x2 >> 1) | (gain_x2 & BIT(0) ? BIT_GAIN_EXT : 0);
1627 
1628 	rtw_dbg(rtwdev, RTW_DBG_RFK,
1629 		"[TXGAPK] gain=0x%X offset=%d new_gain=0x%X\n",
1630 		gain, offset, new_gain);
1631 
1632 	return new_gain;
1633 }
1634 
1635 static void rtw8822c_txgapk_write_tx_gain(struct rtw_dev *rtwdev)
1636 {
1637 	struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1638 	u32 i, j, tmp = 0x20, tmp_3f, v;
1639 	s8 offset_tmp[RF_GAIN_NUM] = {0};
1640 	u8 path, band = RF_BAND_2G_OFDM, channel = txgapk->channel;
1641 
1642 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1643 
1644 	if (channel >= 1 && channel <= 14) {
1645 		tmp = 0x20;
1646 		band = RF_BAND_2G_OFDM;
1647 	} else if (channel >= 36 && channel <= 64) {
1648 		tmp = 0x200;
1649 		band = RF_BAND_5G_L;
1650 	} else if (channel >= 100 && channel <= 144) {
1651 		tmp = 0x280;
1652 		band = RF_BAND_5G_M;
1653 	} else if (channel >= 149 && channel <= 177) {
1654 		tmp = 0x300;
1655 		band = RF_BAND_5G_H;
1656 	} else {
1657 		rtw_err(rtwdev, "[TXGAPK] unknown channel %d!!\n", channel);
1658 		return;
1659 	}
1660 
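	/* For each path, accumulate the per-step gap offsets from index i
	 * upwards to form the absolute correction for every gain entry;
	 * entries whose stored gain is already flagged valid (>= 0xCEX)
	 * are skipped.
	 */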
1661 	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1662 		for (i = 0; i < RF_GAIN_NUM; i++) {
1663 			offset_tmp[i] = 0;
1664 			for (j = i; j < RF_GAIN_NUM; j++) {
1665 				v = txgapk->rf3f_bp[band][j][path];
1666 				if (_rtw8822c_txgapk_gain_valid(rtwdev, v))
1667 					continue;
1668 
1669 				offset_tmp[i] += txgapk->offset[j][path];
1670 				txgapk->fianl_offset[i][path] = offset_tmp[i];
1671 			}
1672 
1673 			v = txgapk->rf3f_bp[band][i][path];
1674 			if (_rtw8822c_txgapk_gain_valid(rtwdev, v)) {
1675 				rtw_dbg(rtwdev, RTW_DBG_RFK,
1676 					"[TXGAPK] tx_gain=0x%03X >= 0xCEX\n",
1677 					txgapk->rf3f_bp[band][i][path]);
1678 			} else {
1679 				txgapk->rf3f_fs[path][i] = offset_tmp[i];
1680 				rtw_dbg(rtwdev, RTW_DBG_RFK,
1681 					"[TXGAPK] offset %d %d\n",
1682 					offset_tmp[i], i);
1683 			}
1684 		}
1685 
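		/* Enter RF LUT write mode, program the corrected gain word for
		 * each table address, then leave write mode again.
		 */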
1686 		rtw_write_rf(rtwdev, path, RF_LUTWE2, RFREG_MASK, 0x10000);
1687 		for (i = 0; i < RF_GAIN_NUM; i++) {
1688 			rtw_write_rf(rtwdev, path,
1689 				     RF_LUTWA, RFREG_MASK, tmp + i);
1690 
1691 			tmp_3f = rtw8822c_txgapk_cal_gain(rtwdev,
1692 							  txgapk->rf3f_bp[band][i][path],
1693 							  offset_tmp[i]);
1694 			rtw_write_rf(rtwdev, path, RF_LUTWD0,
1695 				     BIT_GAIN_EXT | BIT_DATA_L, tmp_3f);
1696 
1697 			rtw_dbg(rtwdev, RTW_DBG_RFK,
1698 				"[TXGAPK] 0x33=0x%05X 0x3f=0x%04X\n",
1699 				tmp + i, tmp_3f);
1700 		}
1701 		rtw_write_rf(rtwdev, path, RF_LUTWE2, RFREG_MASK, 0x0);
1702 	}
1703 }
1704 
1705 static void rtw8822c_txgapk_save_all_tx_gain_table(struct rtw_dev *rtwdev)
1706 {
1707 	struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1708 	static const u32 three_wire[2] = {REG_3WIRE, REG_3WIRE2};
1709 	static const u8 ch_num[RF_BAND_MAX] = {1, 1, 36, 100, 149};
1710 	static const u8 band_num[RF_BAND_MAX] = {0x0, 0x0, 0x1, 0x3, 0x5};
1711 	static const u8 cck[RF_BAND_MAX] = {0x1, 0x0, 0x0, 0x0, 0x0};
1712 	u8 path, band, gain, rf0_idx;
1713 	u32 rf18, v;
1714 
1715 	if (rtwdev->dm_info.dm_flags & BIT(RTW_DM_CAP_TXGAPK))
1716 		return;
1717 
1718 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1719 
1720 	if (txgapk->read_txgain == 1) {
1721 		rtw_dbg(rtwdev, RTW_DBG_RFK,
1722 			"[TXGAPK] Already Read txgapk->read_txgain return!!!\n");
1723 		rtw8822c_txgapk_write_gain_bb_table(rtwdev);
1724 		return;
1725 	}
1726 
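	/* For every band, temporarily retune RF18 to a representative channel
	 * with the 3-wire interface disabled, read back the raw TX gain code
	 * (RF_TX_RESULT, 0x5f) for each gain index, then restore the original
	 * channel setting.
	 */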
1727 	for (band = 0; band < RF_BAND_MAX; band++) {
1728 		for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1729 			rf18 = rtw_read_rf(rtwdev, path, RF_CFGCH, RFREG_MASK);
1730 
1731 			rtw_write32_mask(rtwdev,
1732 					 three_wire[path], BIT_3WIRE_EN, 0x0);
1733 			rtw_write_rf(rtwdev, path,
1734 				     RF_CFGCH, MASKBYTE0, ch_num[band]);
1735 			rtw_write_rf(rtwdev, path,
1736 				     RF_CFGCH, BIT_BAND, band_num[band]);
1737 			rtw_write_rf(rtwdev, path,
1738 				     RF_BW_TRXBB, BIT_DBG_CCK_CCA, cck[band]);
1739 			rtw_write_rf(rtwdev, path,
1740 				     RF_BW_TRXBB, BIT_TX_CCK_IND, cck[band]);
1741 			gain = 0;
1742 			for (rf0_idx = 1; rf0_idx < 32; rf0_idx += 3) {
1743 				rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC,
1744 					     MASKBYTE0, rf0_idx);
1745 				v = rtw_read_rf(rtwdev, path,
1746 						RF_TX_RESULT, RFREG_MASK);
1747 				txgapk->rf3f_bp[band][gain][path] = v & BIT_DATA_L;
1748 
1749 				rtw_dbg(rtwdev, RTW_DBG_RFK,
1750 					"[TXGAPK] 0x5f=0x%03X band=%d path=%d\n",
1751 					txgapk->rf3f_bp[band][gain][path],
1752 					band, path);
1753 				gain++;
1754 			}
1755 			rtw_write_rf(rtwdev, path, RF_CFGCH, RFREG_MASK, rf18);
1756 			rtw_write32_mask(rtwdev,
1757 					 three_wire[path], BIT_3WIRE_EN, 0x3);
1758 		}
1759 	}
1760 	rtw8822c_txgapk_write_gain_bb_table(rtwdev);
1761 	txgapk->read_txgain = 1;
1762 }
1763 
1764 static void rtw8822c_txgapk(struct rtw_dev *rtwdev)
1765 {
1766 	static const u32 bb_reg[2] = {REG_TX_PTCL_CTRL, REG_TX_FIFO};
1767 	struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1768 	u32 bb_reg_backup[2];
1769 	u8 path;
1770 
1771 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1772 
1773 	rtw8822c_txgapk_save_all_tx_gain_table(rtwdev);
1774 
1775 	if (txgapk->read_txgain == 0) {
1776 		rtw_dbg(rtwdev, RTW_DBG_RFK,
1777 			"[TXGAPK] txgapk->read_txgain == 0 return!!!\n");
1778 		return;
1779 	}
1780 
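	/* Efuse power-track types 4-7 correspond to TSSI mode, in which case
	 * the TX gap calibration is not applied.
	 */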
1781 	if (rtwdev->efuse.power_track_type >= 4 &&
1782 	    rtwdev->efuse.power_track_type <= 7) {
1783 		rtw_dbg(rtwdev, RTW_DBG_RFK,
1784 			"[TXGAPK] Normal Mode in TSSI mode. return!!!\n");
1785 		return;
1786 	}
1787 
1788 	rtw8822c_txgapk_backup_bb_reg(rtwdev, bb_reg,
1789 				      bb_reg_backup, ARRAY_SIZE(bb_reg));
1790 	rtw8822c_txgapk_tx_pause(rtwdev);
1791 	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1792 		txgapk->channel = rtw_read_rf(rtwdev, path,
1793 					      RF_CFGCH, RFREG_MASK) & MASKBYTE0;
1794 		rtw8822c_txgapk_bb_dpk(rtwdev, path);
1795 		rtw8822c_txgapk_afe_dpk(rtwdev, path);
1796 		rtw8822c_txgapk_calculate_offset(rtwdev, path);
1797 		rtw8822c_txgapk_rf_restore(rtwdev, path);
1798 		rtw8822c_txgapk_afe_dpk_restore(rtwdev, path);
1799 		rtw8822c_txgapk_bb_dpk_restore(rtwdev, path);
1800 	}
1801 	rtw8822c_txgapk_write_tx_gain(rtwdev);
1802 	rtw8822c_txgapk_reload_bb_reg(rtwdev, bb_reg,
1803 				      bb_reg_backup, ARRAY_SIZE(bb_reg));
1804 }
1805 
1806 static void rtw8822c_do_gapk(struct rtw_dev *rtwdev)
1807 {
1808 	struct rtw_dm_info *dm = &rtwdev->dm_info;
1809 
1810 	if (dm->dm_flags & BIT(RTW_DM_CAP_TXGAPK)) {
1811 		rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] feature disable!!!\n");
1812 		return;
1813 	}
1814 	rtw8822c_rfk_handshake(rtwdev, true);
1815 	rtw8822c_txgapk(rtwdev);
1816 	rtw8822c_rfk_handshake(rtwdev, false);
1817 }
1818 
1819 static void rtw8822c_rf_init(struct rtw_dev *rtwdev)
1820 {
1821 	rtw8822c_rf_dac_cal(rtwdev);
1822 	rtw8822c_rf_x2_check(rtwdev);
1823 	rtw8822c_thermal_trim(rtwdev);
1824 	rtw8822c_power_trim(rtwdev);
1825 	rtw8822c_pa_bias(rtwdev);
1826 }
1827 
1828 static void rtw8822c_pwrtrack_init(struct rtw_dev *rtwdev)
1829 {
1830 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
1831 	u8 path;
1832 
1833 	for (path = RF_PATH_A; path < RTW_RF_PATH_MAX; path++) {
1834 		dm_info->delta_power_index[path] = 0;
1835 		ewma_thermal_init(&dm_info->avg_thermal[path]);
1836 		dm_info->thermal_avg[path] = 0xff;
1837 	}
1838 
1839 	dm_info->pwr_trk_triggered = false;
1840 	dm_info->thermal_meter_k = rtwdev->efuse.thermal_meter_k;
1841 	dm_info->thermal_meter_lck = rtwdev->efuse.thermal_meter_k;
1842 }
1843 
1844 static void rtw8822c_phy_set_param(struct rtw_dev *rtwdev)
1845 {
1846 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
1847 	struct rtw_hal *hal = &rtwdev->hal;
1848 	u8 crystal_cap;
1849 	u8 cck_gi_u_bnd_msb = 0;
1850 	u8 cck_gi_u_bnd_lsb = 0;
1851 	u8 cck_gi_l_bnd_msb = 0;
1852 	u8 cck_gi_l_bnd_lsb = 0;
1853 	bool is_tx2_path;
1854 
1855 	/* power on BB/RF domain */
1856 	rtw_write8_set(rtwdev, REG_SYS_FUNC_EN,
1857 		       BIT_FEN_BB_GLB_RST | BIT_FEN_BB_RSTB);
1858 	rtw_write8_set(rtwdev, REG_RF_CTRL,
1859 		       BIT_RF_EN | BIT_RF_RSTB | BIT_RF_SDM_RSTB);
1860 	rtw_write32_set(rtwdev, REG_WLRF1, BIT_WLRF1_BBRF_EN);
1861 
1862 	/* disable low rate DPD */
1863 	rtw_write32_mask(rtwdev, REG_DIS_DPD, DIS_DPD_MASK, DIS_DPD_RATEALL);
1864 
1865 	/* pre init before header files config */
1866 	rtw8822c_header_file_init(rtwdev, true);
1867 
1868 	rtw_phy_load_tables(rtwdev);
1869 
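	/* Program the 7-bit crystal cap value from efuse into both halves of
	 * the XTAL control field.
	 */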
1870 	crystal_cap = rtwdev->efuse.crystal_cap & 0x7f;
1871 	rtw_write32_mask(rtwdev, REG_ANAPAR_XTAL_0, 0xfffc00,
1872 			 crystal_cap | (crystal_cap << 7));
1873 
1874 	/* post init after header files config */
1875 	rtw8822c_header_file_init(rtwdev, false);
1876 
1877 	is_tx2_path = false;
1878 	rtw8822c_config_trx_mode(rtwdev, hal->antenna_tx, hal->antenna_rx,
1879 				 is_tx2_path);
1880 	rtw_phy_init(rtwdev);
1881 
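	/* Cache the CCK AGC gain-index bounds; they are used later to
	 * compensate CCK power reports in query_phy_status_page0().
	 */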
1882 	cck_gi_u_bnd_msb = (u8)rtw_read32_mask(rtwdev, 0x1a98, 0xc000);
1883 	cck_gi_u_bnd_lsb = (u8)rtw_read32_mask(rtwdev, 0x1aa8, 0xf0000);
1884 	cck_gi_l_bnd_msb = (u8)rtw_read32_mask(rtwdev, 0x1a98, 0xc0);
1885 	cck_gi_l_bnd_lsb = (u8)rtw_read32_mask(rtwdev, 0x1a70, 0x0f000000);
1886 
1887 	dm_info->cck_gi_u_bnd = ((cck_gi_u_bnd_msb << 4) | (cck_gi_u_bnd_lsb));
1888 	dm_info->cck_gi_l_bnd = ((cck_gi_l_bnd_msb << 4) | (cck_gi_l_bnd_lsb));
1889 
1890 	rtw8822c_rf_init(rtwdev);
1891 	rtw8822c_pwrtrack_init(rtwdev);
1892 
1893 	rtw_bf_phy_init(rtwdev);
1894 }
1895 
1896 #define WLAN_TXQ_RPT_EN		0x1F
1897 #define WLAN_SLOT_TIME		0x09
1898 #define WLAN_PIFS_TIME		0x1C
1899 #define WLAN_SIFS_CCK_CONT_TX	0x0A
1900 #define WLAN_SIFS_OFDM_CONT_TX	0x0E
1901 #define WLAN_SIFS_CCK_TRX	0x0A
1902 #define WLAN_SIFS_OFDM_TRX	0x10
1903 #define WLAN_NAV_MAX		0xC8
1904 #define WLAN_RDG_NAV		0x05
1905 #define WLAN_TXOP_NAV		0x1B
1906 #define WLAN_CCK_RX_TSF		0x30
1907 #define WLAN_OFDM_RX_TSF	0x30
1908 #define WLAN_TBTT_PROHIBIT	0x04 /* unit : 32us */
1909 #define WLAN_TBTT_HOLD_TIME	0x064 /* unit : 32us */
1910 #define WLAN_DRV_EARLY_INT	0x04
1911 #define WLAN_BCN_CTRL_CLT0	0x10
1912 #define WLAN_BCN_DMA_TIME	0x02
1913 #define WLAN_BCN_MAX_ERR	0xFF
1914 #define WLAN_SIFS_CCK_DUR_TUNE	0x0A
1915 #define WLAN_SIFS_OFDM_DUR_TUNE	0x10
1916 #define WLAN_SIFS_CCK_CTX	0x0A
1917 #define WLAN_SIFS_CCK_IRX	0x0A
1918 #define WLAN_SIFS_OFDM_CTX	0x0E
1919 #define WLAN_SIFS_OFDM_IRX	0x0E
1920 #define WLAN_EIFS_DUR_TUNE	0x40
1921 #define WLAN_EDCA_VO_PARAM	0x002FA226
1922 #define WLAN_EDCA_VI_PARAM	0x005EA328
1923 #define WLAN_EDCA_BE_PARAM	0x005EA42B
1924 #define WLAN_EDCA_BK_PARAM	0x0000A44F
1925 
1926 #define WLAN_RX_FILTER0		0xFFFFFFFF
1927 #define WLAN_RX_FILTER2		0xFFFF
1928 #define WLAN_RCR_CFG		0xE400220E
1929 #define WLAN_RXPKT_MAX_SZ	12288
1930 #define WLAN_RXPKT_MAX_SZ_512	(WLAN_RXPKT_MAX_SZ >> 9)
1931 
1932 #define WLAN_AMPDU_MAX_TIME		0x70
1933 #define WLAN_RTS_LEN_TH			0xFF
1934 #define WLAN_RTS_TX_TIME_TH		0x08
1935 #define WLAN_MAX_AGG_PKT_LIMIT		0x3f
1936 #define WLAN_RTS_MAX_AGG_PKT_LIMIT	0x3f
1937 #define WLAN_PRE_TXCNT_TIME_TH		0x1E0
1938 #define FAST_EDCA_VO_TH		0x06
1939 #define FAST_EDCA_VI_TH		0x06
1940 #define FAST_EDCA_BE_TH		0x06
1941 #define FAST_EDCA_BK_TH		0x06
1942 #define WLAN_BAR_RETRY_LIMIT		0x01
1943 #define WLAN_BAR_ACK_TYPE		0x05
1944 #define WLAN_RA_TRY_RATE_AGG_LIMIT	0x08
1945 #define WLAN_RESP_TXRATE		0x84
1946 #define WLAN_ACK_TO			0x21
1947 #define WLAN_ACK_TO_CCK			0x6A
1948 #define WLAN_DATA_RATE_FB_CNT_1_4	0x01000000
1949 #define WLAN_DATA_RATE_FB_CNT_5_8	0x08070504
1950 #define WLAN_RTS_RATE_FB_CNT_5_8	0x08070504
1951 #define WLAN_DATA_RATE_FB_RATE0		0xFE01F010
1952 #define WLAN_DATA_RATE_FB_RATE0_H	0x40000000
1953 #define WLAN_RTS_RATE_FB_RATE1		0x003FF010
1954 #define WLAN_RTS_RATE_FB_RATE1_H	0x40000000
1955 #define WLAN_RTS_RATE_FB_RATE4		0x0600F010
1956 #define WLAN_RTS_RATE_FB_RATE4_H	0x400003E0
1957 #define WLAN_RTS_RATE_FB_RATE5		0x0600F015
1958 #define WLAN_RTS_RATE_FB_RATE5_H	0x000000E0
1959 #define WLAN_MULTI_ADDR			0xFFFFFFFF
1960 
1961 #define WLAN_TX_FUNC_CFG1		0x30
1962 #define WLAN_TX_FUNC_CFG2		0x30
1963 #define WLAN_MAC_OPT_NORM_FUNC1		0x98
1964 #define WLAN_MAC_OPT_LB_FUNC1		0x80
1965 #define WLAN_MAC_OPT_FUNC2		0xb0810041
1966 #define WLAN_MAC_INT_MIG_CFG		0x33330000
1967 
1968 #define WLAN_SIFS_CFG	(WLAN_SIFS_CCK_CONT_TX | \
1969 			(WLAN_SIFS_OFDM_CONT_TX << BIT_SHIFT_SIFS_OFDM_CTX) | \
1970 			(WLAN_SIFS_CCK_TRX << BIT_SHIFT_SIFS_CCK_TRX) | \
1971 			(WLAN_SIFS_OFDM_TRX << BIT_SHIFT_SIFS_OFDM_TRX))
1972 
1973 #define WLAN_SIFS_DUR_TUNE	(WLAN_SIFS_CCK_DUR_TUNE | \
1974 				(WLAN_SIFS_OFDM_DUR_TUNE << 8))
1975 
1976 #define WLAN_TBTT_TIME	(WLAN_TBTT_PROHIBIT |\
1977 			(WLAN_TBTT_HOLD_TIME << BIT_SHIFT_TBTT_HOLD_TIME_AP))
1978 
1979 #define WLAN_NAV_CFG		(WLAN_RDG_NAV | (WLAN_TXOP_NAV << 16))
1980 #define WLAN_RX_TSF_CFG		(WLAN_CCK_RX_TSF | (WLAN_OFDM_RX_TSF) << 8)
1981 
1982 #define MAC_CLK_SPEED	80 /* 80M */
1983 #define EFUSE_PCB_INFO_OFFSET	0xCA
1984 
1985 static int rtw8822c_mac_init(struct rtw_dev *rtwdev)
1986 {
1987 	u8 value8;
1988 	u16 value16;
1989 	u32 value32;
1990 	u16 pre_txcnt;
1991 
1992 	/* txq control */
1993 	value8 = rtw_read8(rtwdev, REG_FWHW_TXQ_CTRL);
1994 	value8 |= (BIT(7) & ~BIT(1) & ~BIT(2));
1995 	rtw_write8(rtwdev, REG_FWHW_TXQ_CTRL, value8);
1996 	rtw_write8(rtwdev, REG_FWHW_TXQ_CTRL + 1, WLAN_TXQ_RPT_EN);
1997 	/* sifs control */
1998 	rtw_write16(rtwdev, REG_SPEC_SIFS, WLAN_SIFS_DUR_TUNE);
1999 	rtw_write32(rtwdev, REG_SIFS, WLAN_SIFS_CFG);
2000 	rtw_write16(rtwdev, REG_RESP_SIFS_CCK,
2001 		    WLAN_SIFS_CCK_CTX | WLAN_SIFS_CCK_IRX << 8);
2002 	rtw_write16(rtwdev, REG_RESP_SIFS_OFDM,
2003 		    WLAN_SIFS_OFDM_CTX | WLAN_SIFS_OFDM_IRX << 8);
2004 	/* rate fallback control */
2005 	rtw_write32(rtwdev, REG_DARFRC, WLAN_DATA_RATE_FB_CNT_1_4);
2006 	rtw_write32(rtwdev, REG_DARFRCH, WLAN_DATA_RATE_FB_CNT_5_8);
2007 	rtw_write32(rtwdev, REG_RARFRCH, WLAN_RTS_RATE_FB_CNT_5_8);
2008 	rtw_write32(rtwdev, REG_ARFR0, WLAN_DATA_RATE_FB_RATE0);
2009 	rtw_write32(rtwdev, REG_ARFRH0, WLAN_DATA_RATE_FB_RATE0_H);
2010 	rtw_write32(rtwdev, REG_ARFR1_V1, WLAN_RTS_RATE_FB_RATE1);
2011 	rtw_write32(rtwdev, REG_ARFRH1_V1, WLAN_RTS_RATE_FB_RATE1_H);
2012 	rtw_write32(rtwdev, REG_ARFR4, WLAN_RTS_RATE_FB_RATE4);
2013 	rtw_write32(rtwdev, REG_ARFRH4, WLAN_RTS_RATE_FB_RATE4_H);
2014 	rtw_write32(rtwdev, REG_ARFR5, WLAN_RTS_RATE_FB_RATE5);
2015 	rtw_write32(rtwdev, REG_ARFRH5, WLAN_RTS_RATE_FB_RATE5_H);
2016 	/* protocol configuration */
2017 	rtw_write8(rtwdev, REG_AMPDU_MAX_TIME_V1, WLAN_AMPDU_MAX_TIME);
2018 	rtw_write8_set(rtwdev, REG_TX_HANG_CTRL, BIT_EN_EOF_V1);
2019 	pre_txcnt = WLAN_PRE_TXCNT_TIME_TH | BIT_EN_PRECNT;
2020 	rtw_write8(rtwdev, REG_PRECNT_CTRL, (u8)(pre_txcnt & 0xFF));
2021 	rtw_write8(rtwdev, REG_PRECNT_CTRL + 1, (u8)(pre_txcnt >> 8));
2022 	value32 = WLAN_RTS_LEN_TH | (WLAN_RTS_TX_TIME_TH << 8) |
2023 		  (WLAN_MAX_AGG_PKT_LIMIT << 16) |
2024 		  (WLAN_RTS_MAX_AGG_PKT_LIMIT << 24);
2025 	rtw_write32(rtwdev, REG_PROT_MODE_CTRL, value32);
2026 	rtw_write16(rtwdev, REG_BAR_MODE_CTRL + 2,
2027 		    WLAN_BAR_RETRY_LIMIT | WLAN_RA_TRY_RATE_AGG_LIMIT << 8);
2028 	rtw_write8(rtwdev, REG_FAST_EDCA_VOVI_SETTING, FAST_EDCA_VO_TH);
2029 	rtw_write8(rtwdev, REG_FAST_EDCA_VOVI_SETTING + 2, FAST_EDCA_VI_TH);
2030 	rtw_write8(rtwdev, REG_FAST_EDCA_BEBK_SETTING, FAST_EDCA_BE_TH);
2031 	rtw_write8(rtwdev, REG_FAST_EDCA_BEBK_SETTING + 2, FAST_EDCA_BK_TH);
2032 	/* close BA parser */
2033 	rtw_write8_clr(rtwdev, REG_LIFETIME_EN, BIT_BA_PARSER_EN);
2034 	rtw_write32_clr(rtwdev, REG_RRSR, BITS_RRSR_RSC);
2035 
2036 	/* EDCA configuration */
2037 	rtw_write32(rtwdev, REG_EDCA_VO_PARAM, WLAN_EDCA_VO_PARAM);
2038 	rtw_write32(rtwdev, REG_EDCA_VI_PARAM, WLAN_EDCA_VI_PARAM);
2039 	rtw_write32(rtwdev, REG_EDCA_BE_PARAM, WLAN_EDCA_BE_PARAM);
2040 	rtw_write32(rtwdev, REG_EDCA_BK_PARAM, WLAN_EDCA_BK_PARAM);
2041 	rtw_write8(rtwdev, REG_PIFS, WLAN_PIFS_TIME);
2042 	rtw_write8_clr(rtwdev, REG_TX_PTCL_CTRL + 1, BIT_SIFS_BK_EN >> 8);
2043 	rtw_write8_set(rtwdev, REG_RD_CTRL + 1,
2044 		       (BIT_DIS_TXOP_CFE | BIT_DIS_LSIG_CFE |
2045 			BIT_DIS_STBC_CFE) >> 8);
2046 
2047 	/* MAC clock configuration */
2048 	rtw_write32_clr(rtwdev, REG_AFE_CTRL1, BIT_MAC_CLK_SEL);
2049 	rtw_write8(rtwdev, REG_USTIME_TSF, MAC_CLK_SPEED);
2050 	rtw_write8(rtwdev, REG_USTIME_EDCA, MAC_CLK_SPEED);
2051 
2052 	rtw_write8_set(rtwdev, REG_MISC_CTRL,
2053 		       BIT_EN_FREE_CNT | BIT_DIS_SECOND_CCA);
2054 	rtw_write8_clr(rtwdev, REG_TIMER0_SRC_SEL, BIT_TSFT_SEL_TIMER0);
2055 	rtw_write16(rtwdev, REG_TXPAUSE, 0x0000);
2056 	rtw_write8(rtwdev, REG_SLOT, WLAN_SLOT_TIME);
2057 	rtw_write32(rtwdev, REG_RD_NAV_NXT, WLAN_NAV_CFG);
2058 	rtw_write16(rtwdev, REG_RXTSF_OFFSET_CCK, WLAN_RX_TSF_CFG);
2059 	/* Set beacon control - enable TSF and other related functions */
2060 	rtw_write8_set(rtwdev, REG_BCN_CTRL, BIT_EN_BCN_FUNCTION);
2061 	/* Set send beacon related registers */
2062 	rtw_write32(rtwdev, REG_TBTT_PROHIBIT, WLAN_TBTT_TIME);
2063 	rtw_write8(rtwdev, REG_DRVERLYINT, WLAN_DRV_EARLY_INT);
2064 	rtw_write8(rtwdev, REG_BCN_CTRL_CLINT0, WLAN_BCN_CTRL_CLT0);
2065 	rtw_write8(rtwdev, REG_BCNDMATIM, WLAN_BCN_DMA_TIME);
2066 	rtw_write8(rtwdev, REG_BCN_MAX_ERR, WLAN_BCN_MAX_ERR);
2067 
2068 	/* WMAC configuration */
2069 	rtw_write32(rtwdev, REG_MAR, WLAN_MULTI_ADDR);
2070 	rtw_write32(rtwdev, REG_MAR + 4, WLAN_MULTI_ADDR);
2071 	rtw_write8(rtwdev, REG_BBPSF_CTRL + 2, WLAN_RESP_TXRATE);
2072 	rtw_write8(rtwdev, REG_ACKTO, WLAN_ACK_TO);
2073 	rtw_write8(rtwdev, REG_ACKTO_CCK, WLAN_ACK_TO_CCK);
2074 	rtw_write16(rtwdev, REG_EIFS, WLAN_EIFS_DUR_TUNE);
2075 	rtw_write8(rtwdev, REG_NAV_CTRL + 2, WLAN_NAV_MAX);
2076 	rtw_write8(rtwdev, REG_WMAC_TRXPTCL_CTL_H  + 2, WLAN_BAR_ACK_TYPE);
2077 	rtw_write32(rtwdev, REG_RXFLTMAP0, WLAN_RX_FILTER0);
2078 	rtw_write16(rtwdev, REG_RXFLTMAP2, WLAN_RX_FILTER2);
2079 	rtw_write32(rtwdev, REG_RCR, WLAN_RCR_CFG);
2080 	rtw_write8(rtwdev, REG_RX_PKT_LIMIT, WLAN_RXPKT_MAX_SZ_512);
2081 	rtw_write8(rtwdev, REG_TCR + 2, WLAN_TX_FUNC_CFG2);
2082 	rtw_write8(rtwdev, REG_TCR + 1, WLAN_TX_FUNC_CFG1);
2083 	rtw_write32_set(rtwdev, REG_GENERAL_OPTION, BIT_DUMMY_FCS_READY_MASK_EN);
2084 	rtw_write32(rtwdev, REG_WMAC_OPTION_FUNCTION + 8, WLAN_MAC_OPT_FUNC2);
2085 	rtw_write8(rtwdev, REG_WMAC_OPTION_FUNCTION_1, WLAN_MAC_OPT_NORM_FUNC1);
2086 
2087 	/* init low power */
2088 	value16 = rtw_read16(rtwdev, REG_RXPSF_CTRL + 2) & 0xF00F;
2089 	value16 |= (BIT_RXGCK_VHT_FIFOTHR(1) | BIT_RXGCK_HT_FIFOTHR(1) |
2090 		    BIT_RXGCK_OFDM_FIFOTHR(1) | BIT_RXGCK_CCK_FIFOTHR(1)) >> 16;
2091 	rtw_write16(rtwdev, REG_RXPSF_CTRL + 2, value16);
2092 	value16 = 0;
2093 	value16 = BIT_SET_RXPSF_PKTLENTHR(value16, 1);
2094 	value16 |= BIT_RXPSF_CTRLEN | BIT_RXPSF_VHTCHKEN | BIT_RXPSF_HTCHKEN
2095 		| BIT_RXPSF_OFDMCHKEN | BIT_RXPSF_CCKCHKEN
2096 		| BIT_RXPSF_OFDMRST;
2097 	rtw_write16(rtwdev, REG_RXPSF_CTRL, value16);
2098 	rtw_write32(rtwdev, REG_RXPSF_TYPE_CTRL, 0xFFFFFFFF);
2099 	/* rx ignore configuration */
2100 	value16 = rtw_read16(rtwdev, REG_RXPSF_CTRL);
2101 	value16 &= ~(BIT_RXPSF_MHCHKEN | BIT_RXPSF_CCKRST |
2102 		     BIT_RXPSF_CONT_ERRCHKEN);
2103 	value16 = BIT_SET_RXPSF_ERRTHR(value16, 0x07);
2104 	rtw_write16(rtwdev, REG_RXPSF_CTRL, value16);
2105 	rtw_write8_set(rtwdev, REG_SND_PTCL_CTRL,
2106 		       BIT_DIS_CHK_VHTSIGB_CRC);
2107 
2108 	/* Interrupt migration configuration */
2109 	rtw_write32(rtwdev, REG_INT_MIG, WLAN_MAC_INT_MIG_CFG);
2110 
2111 	return 0;
2112 }
2113 
2114 #define FWCD_SIZE_REG_8822C 0x2000
2115 #define FWCD_SIZE_DMEM_8822C 0x10000
2116 #define FWCD_SIZE_IMEM_8822C 0x10000
2117 #define FWCD_SIZE_EMEM_8822C 0x20000
2118 #define FWCD_SIZE_ROM_8822C 0x10000
2119 
2120 static const u32 __fwcd_segs_8822c[] = {
2121 	FWCD_SIZE_REG_8822C,
2122 	FWCD_SIZE_DMEM_8822C,
2123 	FWCD_SIZE_IMEM_8822C,
2124 	FWCD_SIZE_EMEM_8822C,
2125 	FWCD_SIZE_ROM_8822C,
2126 };
2127 
2128 static const struct rtw_fwcd_segs rtw8822c_fwcd_segs = {
2129 	.segs = __fwcd_segs_8822c,
2130 	.num = ARRAY_SIZE(__fwcd_segs_8822c),
2131 };
2132 
2133 static int rtw8822c_dump_fw_crash(struct rtw_dev *rtwdev)
2134 {
2135 #define __dump_fw_8822c(_dev, _mem) \
2136 	rtw_dump_fw(_dev, OCPBASE_ ## _mem ## _88XX, \
2137 		    FWCD_SIZE_ ## _mem ## _8822C, RTW_FWCD_ ## _mem)
2138 	int ret;
2139 
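	/* Dump the MAC register space first, then each firmware memory
	 * segment in the order defined by rtw8822c_fwcd_segs.
	 */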
2140 	ret = rtw_dump_reg(rtwdev, 0x0, FWCD_SIZE_REG_8822C);
2141 	if (ret)
2142 		return ret;
2143 	ret = __dump_fw_8822c(rtwdev, DMEM);
2144 	if (ret)
2145 		return ret;
2146 	ret = __dump_fw_8822c(rtwdev, IMEM);
2147 	if (ret)
2148 		return ret;
2149 	ret = __dump_fw_8822c(rtwdev, EMEM);
2150 	if (ret)
2151 		return ret;
2152 	ret = __dump_fw_8822c(rtwdev, ROM);
2153 	if (ret)
2154 		return ret;
2155 
2156 	return 0;
2157 
2158 #undef __dump_fw_8822c
2159 }
2160 
2161 static void rtw8822c_rstb_3wire(struct rtw_dev *rtwdev, bool enable)
2162 {
2163 	if (enable) {
2164 		rtw_write32_mask(rtwdev, REG_RSTB, BIT_RSTB_3WIRE, 0x1);
2165 		rtw_write32_mask(rtwdev, REG_ANAPAR_A, BIT_ANAPAR_UPDATE, 0x1);
2166 		rtw_write32_mask(rtwdev, REG_ANAPAR_B, BIT_ANAPAR_UPDATE, 0x1);
2167 	} else {
2168 		rtw_write32_mask(rtwdev, REG_RSTB, BIT_RSTB_3WIRE, 0x0);
2169 	}
2170 }
2171 
2172 static void rtw8822c_set_channel_rf(struct rtw_dev *rtwdev, u8 channel, u8 bw)
2173 {
2174 #define RF18_BAND_MASK		(BIT(16) | BIT(9) | BIT(8))
2175 #define RF18_BAND_2G		(0)
2176 #define RF18_BAND_5G		(BIT(16) | BIT(8))
2177 #define RF18_CHANNEL_MASK	(MASKBYTE0)
2178 #define RF18_RFSI_MASK		(BIT(18) | BIT(17))
2179 #define RF18_RFSI_GE_CH80	(BIT(17))
2180 #define RF18_RFSI_GT_CH140	(BIT(18))
2181 #define RF18_BW_MASK		(BIT(13) | BIT(12))
2182 #define RF18_BW_20M		(BIT(13) | BIT(12))
2183 #define RF18_BW_40M		(BIT(13))
2184 #define RF18_BW_80M		(BIT(12))
2185 
2186 	u32 rf_reg18 = 0;
2187 	u32 rf_rxbb = 0;
2188 
2189 	rf_reg18 = rtw_read_rf(rtwdev, RF_PATH_A, 0x18, RFREG_MASK);
2190 
2191 	rf_reg18 &= ~(RF18_BAND_MASK | RF18_CHANNEL_MASK | RF18_RFSI_MASK |
2192 		      RF18_BW_MASK);
2193 
2194 	rf_reg18 |= (IS_CH_2G_BAND(channel) ? RF18_BAND_2G : RF18_BAND_5G);
2195 	rf_reg18 |= (channel & RF18_CHANNEL_MASK);
2196 	if (IS_CH_5G_BAND_4(channel))
2197 		rf_reg18 |= RF18_RFSI_GT_CH140;
2198 	else if (IS_CH_5G_BAND_3(channel))
2199 		rf_reg18 |= RF18_RFSI_GE_CH80;
2200 
2201 	switch (bw) {
2202 	case RTW_CHANNEL_WIDTH_5:
2203 	case RTW_CHANNEL_WIDTH_10:
2204 	case RTW_CHANNEL_WIDTH_20:
2205 	default:
2206 		rf_reg18 |= RF18_BW_20M;
2207 		rf_rxbb = 0x18;
2208 		break;
2209 	case RTW_CHANNEL_WIDTH_40:
2210 		/* RF bandwidth */
2211 		rf_reg18 |= RF18_BW_40M;
2212 		rf_rxbb = 0x10;
2213 		break;
2214 	case RTW_CHANNEL_WIDTH_80:
2215 		rf_reg18 |= RF18_BW_80M;
2216 		rf_rxbb = 0x8;
2217 		break;
2218 	}
2219 
2220 	rtw8822c_rstb_3wire(rtwdev, false);
2221 
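	/* Program the RX BB filter setting through the RF LUT on both paths,
	 * then commit the new band/channel/bandwidth word to RF18.
	 */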
2222 	rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWE2, 0x04, 0x01);
2223 	rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWA, 0x1f, 0x12);
2224 	rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWD0, 0xfffff, rf_rxbb);
2225 	rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWE2, 0x04, 0x00);
2226 
2227 	rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWE2, 0x04, 0x01);
2228 	rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWA, 0x1f, 0x12);
2229 	rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWD0, 0xfffff, rf_rxbb);
2230 	rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWE2, 0x04, 0x00);
2231 
2232 	rtw_write_rf(rtwdev, RF_PATH_A, RF_CFGCH, RFREG_MASK, rf_reg18);
2233 	rtw_write_rf(rtwdev, RF_PATH_B, RF_CFGCH, RFREG_MASK, rf_reg18);
2234 
2235 	rtw8822c_rstb_3wire(rtwdev, true);
2236 }
2237 
2238 static void rtw8822c_toggle_igi(struct rtw_dev *rtwdev)
2239 {
2240 	u32 igi;
2241 
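	/* Lower the initial gain index by 2 on both RX paths and then restore
	 * it; this toggle is done after channel or TRX path changes.
	 */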
2242 	igi = rtw_read32_mask(rtwdev, REG_RXIGI, 0x7f);
2243 	rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f, igi - 2);
2244 	rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f00, igi - 2);
2245 	rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f, igi);
2246 	rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f00, igi);
2247 }
2248 
2249 static void rtw8822c_set_channel_bb(struct rtw_dev *rtwdev, u8 channel, u8 bw,
2250 				    u8 primary_ch_idx)
2251 {
2252 	if (IS_CH_2G_BAND(channel)) {
2253 		rtw_write32_clr(rtwdev, REG_BGCTRL, BITS_RX_IQ_WEIGHT);
2254 		rtw_write32_set(rtwdev, REG_TXF4, BIT(20));
2255 		rtw_write32_clr(rtwdev, REG_CCK_CHECK, BIT_CHECK_CCK_EN);
2256 		rtw_write32_clr(rtwdev, REG_CCKTXONLY, BIT_BB_CCK_CHECK_EN);
2257 		rtw_write32_mask(rtwdev, REG_CCAMSK, 0x3F000000, 0xF);
2258 
2259 		switch (bw) {
2260 		case RTW_CHANNEL_WIDTH_20:
2261 			rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_CCK,
2262 					 0x5);
2263 			rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_CCK,
2264 					 0x5);
2265 			rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2266 					 0x6);
2267 			rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2268 					 0x6);
2269 			break;
2270 		case RTW_CHANNEL_WIDTH_40:
2271 			rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_CCK,
2272 					 0x4);
2273 			rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_CCK,
2274 					 0x4);
2275 			rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2276 					 0x0);
2277 			rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2278 					 0x0);
2279 			break;
2280 		}
2281 		if (channel == 13 || channel == 14)
2282 			rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x969);
2283 		else if (channel == 11 || channel == 12)
2284 			rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x96a);
2285 		else
2286 			rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x9aa);
2287 		if (channel == 14) {
2288 			rtw_write32_mask(rtwdev, REG_TXF0, MASKHWORD, 0x3da0);
2289 			rtw_write32_mask(rtwdev, REG_TXF1, MASKDWORD,
2290 					 0x4962c931);
2291 			rtw_write32_mask(rtwdev, REG_TXF2, MASKLWORD, 0x6aa3);
2292 			rtw_write32_mask(rtwdev, REG_TXF3, MASKHWORD, 0xaa7b);
2293 			rtw_write32_mask(rtwdev, REG_TXF4, MASKLWORD, 0xf3d7);
2294 			rtw_write32_mask(rtwdev, REG_TXF5, MASKDWORD, 0x0);
2295 			rtw_write32_mask(rtwdev, REG_TXF6, MASKDWORD,
2296 					 0xff012455);
2297 			rtw_write32_mask(rtwdev, REG_TXF7, MASKDWORD, 0xffff);
2298 		} else {
2299 			rtw_write32_mask(rtwdev, REG_TXF0, MASKHWORD, 0x5284);
2300 			rtw_write32_mask(rtwdev, REG_TXF1, MASKDWORD,
2301 					 0x3e18fec8);
2302 			rtw_write32_mask(rtwdev, REG_TXF2, MASKLWORD, 0x0a88);
2303 			rtw_write32_mask(rtwdev, REG_TXF3, MASKHWORD, 0xacc4);
2304 			rtw_write32_mask(rtwdev, REG_TXF4, MASKLWORD, 0xc8b2);
2305 			rtw_write32_mask(rtwdev, REG_TXF5, MASKDWORD,
2306 					 0x00faf0de);
2307 			rtw_write32_mask(rtwdev, REG_TXF6, MASKDWORD,
2308 					 0x00122344);
2309 			rtw_write32_mask(rtwdev, REG_TXF7, MASKDWORD,
2310 					 0x0fffffff);
2311 		}
2312 		if (channel == 13)
2313 			rtw_write32_mask(rtwdev, REG_TXDFIR0, 0x70, 0x3);
2314 		else
2315 			rtw_write32_mask(rtwdev, REG_TXDFIR0, 0x70, 0x1);
2316 	} else if (IS_CH_5G_BAND(channel)) {
2317 		rtw_write32_set(rtwdev, REG_CCKTXONLY, BIT_BB_CCK_CHECK_EN);
2318 		rtw_write32_set(rtwdev, REG_CCK_CHECK, BIT_CHECK_CCK_EN);
2319 		rtw_write32_set(rtwdev, REG_BGCTRL, BITS_RX_IQ_WEIGHT);
2320 		rtw_write32_clr(rtwdev, REG_TXF4, BIT(20));
2321 		rtw_write32_mask(rtwdev, REG_CCAMSK, 0x3F000000, 0x22);
2322 		rtw_write32_mask(rtwdev, REG_TXDFIR0, 0x70, 0x3);
2323 		if (IS_CH_5G_BAND_1(channel) || IS_CH_5G_BAND_2(channel)) {
2324 			rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2325 					 0x1);
2326 			rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2327 					 0x1);
2328 		} else if (IS_CH_5G_BAND_3(channel)) {
2329 			rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2330 					 0x2);
2331 			rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2332 					 0x2);
2333 		} else if (IS_CH_5G_BAND_4(channel)) {
2334 			rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2335 					 0x3);
2336 			rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2337 					 0x3);
2338 		}
2339 
2340 		if (channel >= 36 && channel <= 51)
2341 			rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x494);
2342 		else if (channel >= 52 && channel <= 55)
2343 			rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x493);
2344 		else if (channel >= 56 && channel <= 111)
2345 			rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x453);
2346 		else if (channel >= 112 && channel <= 119)
2347 			rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x452);
2348 		else if (channel >= 120 && channel <= 172)
2349 			rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x412);
2350 		else if (channel >= 173 && channel <= 177)
2351 			rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x411);
2352 	}
2353 
2354 	switch (bw) {
2355 	case RTW_CHANNEL_WIDTH_20:
2356 		rtw_write32_mask(rtwdev, REG_DFIRBW, 0x3FF0, 0x19B);
2357 		rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x0);
2358 		rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xffc0, 0x0);
2359 		rtw_write32_mask(rtwdev, REG_TXCLK, 0x700, 0x7);
2360 		rtw_write32_mask(rtwdev, REG_TXCLK, 0x700000, 0x6);
2361 		rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x0);
2362 		rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2363 		rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x0);
2364 		break;
2365 	case RTW_CHANNEL_WIDTH_40:
2366 		rtw_write32_mask(rtwdev, REG_CCKSB, BIT(4),
2367 				 (primary_ch_idx == RTW_SC_20_UPPER ? 1 : 0));
2368 		rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x5);
2369 		rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xc0, 0x0);
2370 		rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xff00,
2371 				 (primary_ch_idx | (primary_ch_idx << 4)));
2372 		rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x1);
2373 		rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2374 		rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x1);
2375 		break;
2376 	case RTW_CHANNEL_WIDTH_80:
2377 		rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0xa);
2378 		rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xc0, 0x0);
2379 		rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xff00,
2380 				 (primary_ch_idx | (primary_ch_idx << 4)));
2381 		rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x6);
2382 		rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x1);
2383 		break;
2384 	case RTW_CHANNEL_WIDTH_5:
2385 		rtw_write32_mask(rtwdev, REG_DFIRBW, 0x3FF0, 0x2AB);
2386 		rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x0);
2387 		rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xffc0, 0x1);
2388 		rtw_write32_mask(rtwdev, REG_TXCLK, 0x700, 0x4);
2389 		rtw_write32_mask(rtwdev, REG_TXCLK, 0x700000, 0x4);
2390 		rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x0);
2391 		rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2392 		rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x0);
2393 		break;
2394 	case RTW_CHANNEL_WIDTH_10:
2395 		rtw_write32_mask(rtwdev, REG_DFIRBW, 0x3FF0, 0x2AB);
2396 		rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x0);
2397 		rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xffc0, 0x2);
2398 		rtw_write32_mask(rtwdev, REG_TXCLK, 0x700, 0x6);
2399 		rtw_write32_mask(rtwdev, REG_TXCLK, 0x700000, 0x5);
2400 		rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x0);
2401 		rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2402 		rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x0);
2403 		break;
2404 	}
2405 }
2406 
2407 static void rtw8822c_set_channel(struct rtw_dev *rtwdev, u8 channel, u8 bw,
2408 				 u8 primary_chan_idx)
2409 {
2410 	rtw8822c_set_channel_bb(rtwdev, channel, bw, primary_chan_idx);
2411 	rtw_set_channel_mac(rtwdev, channel, bw, primary_chan_idx);
2412 	rtw8822c_set_channel_rf(rtwdev, channel, bw);
2413 	rtw8822c_toggle_igi(rtwdev);
2414 }
2415 
2416 static void rtw8822c_config_cck_rx_path(struct rtw_dev *rtwdev, u8 rx_path)
2417 {
2418 	if (rx_path == BB_PATH_A || rx_path == BB_PATH_B) {
2419 		rtw_write32_mask(rtwdev, REG_CCANRX, 0x00060000, 0x0);
2420 		rtw_write32_mask(rtwdev, REG_CCANRX, 0x00600000, 0x0);
2421 	} else if (rx_path == BB_PATH_AB) {
2422 		rtw_write32_mask(rtwdev, REG_CCANRX, 0x00600000, 0x1);
2423 		rtw_write32_mask(rtwdev, REG_CCANRX, 0x00060000, 0x1);
2424 	}
2425 
2426 	if (rx_path == BB_PATH_A)
2427 		rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0x0f000000, 0x0);
2428 	else if (rx_path == BB_PATH_B)
2429 		rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0x0f000000, 0x5);
2430 	else if (rx_path == BB_PATH_AB)
2431 		rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0x0f000000, 0x1);
2432 }
2433 
2434 static void rtw8822c_config_ofdm_rx_path(struct rtw_dev *rtwdev, u8 rx_path)
2435 {
2436 	if (rx_path == BB_PATH_A || rx_path == BB_PATH_B) {
2437 		rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x300, 0x0);
2438 		rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x600000, 0x0);
2439 		rtw_write32_mask(rtwdev, REG_AGCSWSH, BIT(17), 0x0);
2440 		rtw_write32_mask(rtwdev, REG_ANTWTPD, BIT(20), 0x0);
2441 		rtw_write32_mask(rtwdev, REG_MRCM, BIT(24), 0x0);
2442 	} else if (rx_path == BB_PATH_AB) {
2443 		rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x300, 0x1);
2444 		rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x600000, 0x1);
2445 		rtw_write32_mask(rtwdev, REG_AGCSWSH, BIT(17), 0x1);
2446 		rtw_write32_mask(rtwdev, REG_ANTWTPD, BIT(20), 0x1);
2447 		rtw_write32_mask(rtwdev, REG_MRCM, BIT(24), 0x1);
2448 	}
2449 
2450 	rtw_write32_mask(rtwdev, 0x824, 0x0f000000, rx_path);
2451 	rtw_write32_mask(rtwdev, 0x824, 0x000f0000, rx_path);
2452 }
2453 
2454 static void rtw8822c_config_rx_path(struct rtw_dev *rtwdev, u8 rx_path)
2455 {
2456 	rtw8822c_config_cck_rx_path(rtwdev, rx_path);
2457 	rtw8822c_config_ofdm_rx_path(rtwdev, rx_path);
2458 }
2459 
2460 static void rtw8822c_config_cck_tx_path(struct rtw_dev *rtwdev, u8 tx_path,
2461 					bool is_tx2_path)
2462 {
2463 	if (tx_path == BB_PATH_A) {
2464 		rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0x8);
2465 	} else if (tx_path == BB_PATH_B) {
2466 		rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0x4);
2467 	} else {
2468 		if (is_tx2_path)
2469 			rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0xc);
2470 		else
2471 			rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0x8);
2472 	}
2473 	rtw8822c_bb_reset(rtwdev);
2474 }
2475 
2476 static void rtw8822c_config_ofdm_tx_path(struct rtw_dev *rtwdev, u8 tx_path,
2477 					 enum rtw_bb_path tx_path_sel_1ss)
2478 {
2479 	if (tx_path == BB_PATH_A) {
2480 		rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x11);
2481 		rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xff, 0x0);
2482 	} else if (tx_path == BB_PATH_B) {
2483 		rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x12);
2484 		rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xff, 0x0);
2485 	} else {
2486 		if (tx_path_sel_1ss == BB_PATH_AB) {
2487 			rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x33);
2488 			rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0404);
2489 		} else if (tx_path_sel_1ss == BB_PATH_B) {
2490 			rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x32);
2491 			rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0400);
2492 		} else if (tx_path_sel_1ss == BB_PATH_A) {
2493 			rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x31);
2494 			rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0400);
2495 		}
2496 	}
2497 	rtw8822c_bb_reset(rtwdev);
2498 }
2499 
2500 static void rtw8822c_config_tx_path(struct rtw_dev *rtwdev, u8 tx_path,
2501 				    enum rtw_bb_path tx_path_sel_1ss,
2502 				    enum rtw_bb_path tx_path_cck,
2503 				    bool is_tx2_path)
2504 {
2505 	rtw8822c_config_cck_tx_path(rtwdev, tx_path_cck, is_tx2_path);
2506 	rtw8822c_config_ofdm_tx_path(rtwdev, tx_path, tx_path_sel_1ss);
2507 	rtw8822c_bb_reset(rtwdev);
2508 }
2509 
2510 static void rtw8822c_config_trx_mode(struct rtw_dev *rtwdev, u8 tx_path,
2511 				     u8 rx_path, bool is_tx2_path)
2512 {
2513 	if ((tx_path | rx_path) & BB_PATH_A)
2514 		rtw_write32_mask(rtwdev, REG_ORITXCODE, MASK20BITS, 0x33312);
2515 	else
2516 		rtw_write32_mask(rtwdev, REG_ORITXCODE, MASK20BITS, 0x11111);
2517 	if ((tx_path | rx_path) & BB_PATH_B)
2518 		rtw_write32_mask(rtwdev, REG_ORITXCODE2, MASK20BITS, 0x33312);
2519 	else
2520 		rtw_write32_mask(rtwdev, REG_ORITXCODE2, MASK20BITS, 0x11111);
2521 
2522 	rtw8822c_config_rx_path(rtwdev, rx_path);
2523 	rtw8822c_config_tx_path(rtwdev, tx_path, BB_PATH_A, BB_PATH_A,
2524 				is_tx2_path);
2525 
2526 	rtw8822c_toggle_igi(rtwdev);
2527 }
2528 
2529 static void query_phy_status_page0(struct rtw_dev *rtwdev, u8 *phy_status,
2530 				   struct rtw_rx_pkt_stat *pkt_stat)
2531 {
2532 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2533 	u8 l_bnd, u_bnd;
2534 	u8 gain_a, gain_b;
2535 	s8 rx_power[RTW_RF_PATH_MAX];
2536 	s8 min_rx_power = -120;
2537 	u8 rssi;
2538 	u8 channel;
2539 	int path;
2540 
2541 	rx_power[RF_PATH_A] = GET_PHY_STAT_P0_PWDB_A(phy_status);
2542 	rx_power[RF_PATH_B] = GET_PHY_STAT_P0_PWDB_B(phy_status);
2543 	l_bnd = dm_info->cck_gi_l_bnd;
2544 	u_bnd = dm_info->cck_gi_u_bnd;
2545 	gain_a = GET_PHY_STAT_P0_GAIN_A(phy_status);
2546 	gain_b = GET_PHY_STAT_P0_GAIN_B(phy_status);
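	/* Adjust the reported CCK power when the AGC gain index lies outside
	 * the calibrated [l_bnd, u_bnd] window, two counts per index step.
	 */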
2547 	if (gain_a < l_bnd)
2548 		rx_power[RF_PATH_A] += (l_bnd - gain_a) << 1;
2549 	else if (gain_a > u_bnd)
2550 		rx_power[RF_PATH_A] -= (gain_a - u_bnd) << 1;
2551 	if (gain_b < l_bnd)
2552 		rx_power[RF_PATH_B] += (l_bnd - gain_b) << 1;
2553 	else if (gain_b > u_bnd)
2554 		rx_power[RF_PATH_B] -= (gain_b - u_bnd) << 1;
2555 
2556 	rx_power[RF_PATH_A] -= 110;
2557 	rx_power[RF_PATH_B] -= 110;
2558 
2559 	channel = GET_PHY_STAT_P0_CHANNEL(phy_status);
2560 	if (channel == 0)
2561 		channel = rtwdev->hal.current_channel;
2562 	rtw_set_rx_freq_band(pkt_stat, channel);
2563 
2564 	pkt_stat->rx_power[RF_PATH_A] = rx_power[RF_PATH_A];
2565 	pkt_stat->rx_power[RF_PATH_B] = rx_power[RF_PATH_B];
2566 
2567 	for (path = 0; path <= rtwdev->hal.rf_path_num; path++) {
2568 		rssi = rtw_phy_rf_power_2_rssi(&pkt_stat->rx_power[path], 1);
2569 		dm_info->rssi[path] = rssi;
2570 	}
2571 
2572 	pkt_stat->rssi = rtw_phy_rf_power_2_rssi(pkt_stat->rx_power, 1);
2573 	pkt_stat->bw = RTW_CHANNEL_WIDTH_20;
2574 	pkt_stat->signal_power = max(pkt_stat->rx_power[RF_PATH_A],
2575 				     min_rx_power);
2576 }
2577 
2578 static void query_phy_status_page1(struct rtw_dev *rtwdev, u8 *phy_status,
2579 				   struct rtw_rx_pkt_stat *pkt_stat)
2580 {
2581 	struct rtw_path_div *p_div = &rtwdev->dm_path_div;
2582 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2583 	u8 rxsc, bw;
2584 	s8 min_rx_power = -120;
2585 	s8 rx_evm;
2586 	u8 evm_dbm = 0;
2587 	u8 rssi;
2588 	int path;
2589 	u8 channel;
2590 
2591 	if (pkt_stat->rate > DESC_RATE11M && pkt_stat->rate < DESC_RATEMCS0)
2592 		rxsc = GET_PHY_STAT_P1_L_RXSC(phy_status);
2593 	else
2594 		rxsc = GET_PHY_STAT_P1_HT_RXSC(phy_status);
2595 
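	/* Derive the received bandwidth from the RX subchannel index:
	 * 9-12 maps to 40 MHz, 13 and above to 80 MHz, otherwise 20 MHz.
	 */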
2596 	if (rxsc >= 9 && rxsc <= 12)
2597 		bw = RTW_CHANNEL_WIDTH_40;
2598 	else if (rxsc >= 13)
2599 		bw = RTW_CHANNEL_WIDTH_80;
2600 	else
2601 		bw = RTW_CHANNEL_WIDTH_20;
2602 
2603 	channel = GET_PHY_STAT_P1_CHANNEL(phy_status);
2604 	rtw_set_rx_freq_band(pkt_stat, channel);
2605 
2606 	pkt_stat->rx_power[RF_PATH_A] = GET_PHY_STAT_P1_PWDB_A(phy_status) - 110;
2607 	pkt_stat->rx_power[RF_PATH_B] = GET_PHY_STAT_P1_PWDB_B(phy_status) - 110;
2608 	pkt_stat->rssi = rtw_phy_rf_power_2_rssi(pkt_stat->rx_power, 2);
2609 	pkt_stat->bw = bw;
2610 	pkt_stat->signal_power = max3(pkt_stat->rx_power[RF_PATH_A],
2611 				      pkt_stat->rx_power[RF_PATH_B],
2612 				      min_rx_power);
2613 
2614 	dm_info->curr_rx_rate = pkt_stat->rate;
2615 
2616 	pkt_stat->rx_evm[RF_PATH_A] = GET_PHY_STAT_P1_RXEVM_A(phy_status);
2617 	pkt_stat->rx_evm[RF_PATH_B] = GET_PHY_STAT_P1_RXEVM_B(phy_status);
2618 
2619 	pkt_stat->rx_snr[RF_PATH_A] = GET_PHY_STAT_P1_RXSNR_A(phy_status);
2620 	pkt_stat->rx_snr[RF_PATH_B] = GET_PHY_STAT_P1_RXSNR_B(phy_status);
2621 
2622 	pkt_stat->cfo_tail[RF_PATH_A] = GET_PHY_STAT_P1_CFO_TAIL_A(phy_status);
2623 	pkt_stat->cfo_tail[RF_PATH_B] = GET_PHY_STAT_P1_CFO_TAIL_B(phy_status);
2624 
2625 	for (path = 0; path <= rtwdev->hal.rf_path_num; path++) {
2626 		rssi = rtw_phy_rf_power_2_rssi(&pkt_stat->rx_power[path], 1);
2627 		dm_info->rssi[path] = rssi;
2628 		if (path == RF_PATH_A) {
2629 			p_div->path_a_sum += rssi;
2630 			p_div->path_a_cnt++;
2631 		} else if (path == RF_PATH_B) {
2632 			p_div->path_b_sum += rssi;
2633 			p_div->path_b_cnt++;
2634 		}
2635 		dm_info->rx_snr[path] = pkt_stat->rx_snr[path] >> 1;
2636 		dm_info->cfo_tail[path] = (pkt_stat->cfo_tail[path] * 5) >> 1;
2637 
2638 		rx_evm = pkt_stat->rx_evm[path];
2639 
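		/* EVM is reported as a negative value; negate and halve it to
		 * get the figure stored in rx_evm_dbm, treating S8_MIN as
		 * invalid (0).
		 */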
2640 		if (rx_evm < 0) {
2641 			if (rx_evm == S8_MIN)
2642 				evm_dbm = 0;
2643 			else
2644 				evm_dbm = ((u8)-rx_evm >> 1);
2645 		}
2646 		dm_info->rx_evm_dbm[path] = evm_dbm;
2647 	}
2648 	rtw_phy_parsing_cfo(rtwdev, pkt_stat);
2649 }
2650 
2651 static void query_phy_status(struct rtw_dev *rtwdev, u8 *phy_status,
2652 			     struct rtw_rx_pkt_stat *pkt_stat)
2653 {
2654 	u8 page;
2655 
2656 	page = *phy_status & 0xf;
2657 
2658 	switch (page) {
2659 	case 0:
2660 		query_phy_status_page0(rtwdev, phy_status, pkt_stat);
2661 		break;
2662 	case 1:
2663 		query_phy_status_page1(rtwdev, phy_status, pkt_stat);
2664 		break;
2665 	default:
2666 		rtw_warn(rtwdev, "unused phy status page (%d)\n", page);
2667 		return;
2668 	}
2669 }
2670 
2671 static void rtw8822c_query_rx_desc(struct rtw_dev *rtwdev, u8 *rx_desc,
2672 				   struct rtw_rx_pkt_stat *pkt_stat,
2673 				   struct ieee80211_rx_status *rx_status)
2674 {
2675 	struct ieee80211_hdr *hdr;
2676 	u32 desc_sz = rtwdev->chip->rx_pkt_desc_sz;
2677 	u8 *phy_status = NULL;
2678 
2679 	memset(pkt_stat, 0, sizeof(*pkt_stat));
2680 
2681 	pkt_stat->phy_status = GET_RX_DESC_PHYST(rx_desc);
2682 	pkt_stat->icv_err = GET_RX_DESC_ICV_ERR(rx_desc);
2683 	pkt_stat->crc_err = GET_RX_DESC_CRC32(rx_desc);
2684 	pkt_stat->decrypted = !GET_RX_DESC_SWDEC(rx_desc) &&
2685 			      GET_RX_DESC_ENC_TYPE(rx_desc) != RX_DESC_ENC_NONE;
2686 	pkt_stat->is_c2h = GET_RX_DESC_C2H(rx_desc);
2687 	pkt_stat->pkt_len = GET_RX_DESC_PKT_LEN(rx_desc);
2688 	pkt_stat->drv_info_sz = GET_RX_DESC_DRV_INFO_SIZE(rx_desc);
2689 	pkt_stat->shift = GET_RX_DESC_SHIFT(rx_desc);
2690 	pkt_stat->rate = GET_RX_DESC_RX_RATE(rx_desc);
2691 	pkt_stat->cam_id = GET_RX_DESC_MACID(rx_desc);
2692 	pkt_stat->ppdu_cnt = GET_RX_DESC_PPDU_CNT(rx_desc);
2693 	pkt_stat->tsf_low = GET_RX_DESC_TSFL(rx_desc);
2694 
2695 	/* drv_info_sz is in units of 8 bytes */
2696 	pkt_stat->drv_info_sz *= 8;
2697 
2698 	/* c2h cmd pkt's rx/phy status is of no interest */
2699 	if (pkt_stat->is_c2h)
2700 		return;
2701 
2702 	hdr = (struct ieee80211_hdr *)(rx_desc + desc_sz + pkt_stat->shift +
2703 				       pkt_stat->drv_info_sz);
2704 	pkt_stat->hdr = hdr;
2705 	if (pkt_stat->phy_status) {
2706 		phy_status = rx_desc + desc_sz + pkt_stat->shift;
2707 		query_phy_status(rtwdev, phy_status, pkt_stat);
2708 	}
2709 
2710 	rtw_rx_fill_rx_status(rtwdev, pkt_stat, hdr, rx_status, phy_status);
2711 }
2712 
2713 static void
2714 rtw8822c_set_write_tx_power_ref(struct rtw_dev *rtwdev, u8 *tx_pwr_ref_cck,
2715 				u8 *tx_pwr_ref_ofdm)
2716 {
2717 	struct rtw_hal *hal = &rtwdev->hal;
2718 	u32 txref_cck[2] = {0x18a0, 0x41a0};
2719 	u32 txref_ofdm[2] = {0x18e8, 0x41e8};
2720 	u8 path;
2721 
2722 	for (path = 0; path < hal->rf_path_num; path++) {
2723 		rtw_write32_mask(rtwdev, 0x1c90, BIT(15), 0);
2724 		rtw_write32_mask(rtwdev, txref_cck[path], 0x7f0000,
2725 				 tx_pwr_ref_cck[path]);
2726 	}
2727 	for (path = 0; path < hal->rf_path_num; path++) {
2728 		rtw_write32_mask(rtwdev, 0x1c90, BIT(15), 0);
2729 		rtw_write32_mask(rtwdev, txref_ofdm[path], 0x1fc00,
2730 				 tx_pwr_ref_ofdm[path]);
2731 	}
2732 }
2733 
2734 static void rtw8822c_set_tx_power_diff(struct rtw_dev *rtwdev, u8 rate,
2735 				       s8 *diff_idx)
2736 {
2737 	u32 offset_txagc = 0x3a00;
2738 	u8 rate_idx = rate & 0xfc;
2739 	u8 pwr_idx[4];
2740 	u32 phy_pwr_idx;
2741 	int i;
2742 
2743 	for (i = 0; i < 4; i++)
2744 		pwr_idx[i] = diff_idx[i] & 0x7f;
2745 
2746 	phy_pwr_idx = pwr_idx[0] |
2747 		      (pwr_idx[1] << 8) |
2748 		      (pwr_idx[2] << 16) |
2749 		      (pwr_idx[3] << 24);
2750 
2751 	rtw_write32_mask(rtwdev, 0x1c90, BIT(15), 0x0);
2752 	rtw_write32_mask(rtwdev, offset_txagc + rate_idx, MASKDWORD,
2753 			 phy_pwr_idx);
2754 }
2755 
2756 static void rtw8822c_set_tx_power_index(struct rtw_dev *rtwdev)
2757 {
2758 	struct rtw_hal *hal = &rtwdev->hal;
2759 	u8 rs, rate, j;
2760 	u8 pwr_ref_cck[2] = {hal->tx_pwr_tbl[RF_PATH_A][DESC_RATE11M],
2761 			     hal->tx_pwr_tbl[RF_PATH_B][DESC_RATE11M]};
2762 	u8 pwr_ref_ofdm[2] = {hal->tx_pwr_tbl[RF_PATH_A][DESC_RATEMCS7],
2763 			      hal->tx_pwr_tbl[RF_PATH_B][DESC_RATEMCS7]};
2764 	s8 diff_a, diff_b;
2765 	u8 pwr_a, pwr_b;
2766 	s8 diff_idx[4];
2767 
2768 	rtw8822c_set_write_tx_power_ref(rtwdev, pwr_ref_cck, pwr_ref_ofdm);
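	/* For each rate section, compute the per-rate difference against the
	 * CCK (section 0) or OFDM reference power and write it out once a
	 * group of four rates is complete.
	 */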
2769 	for (rs = 0; rs < RTW_RATE_SECTION_MAX; rs++) {
2770 		for (j = 0; j < rtw_rate_size[rs]; j++) {
2771 			rate = rtw_rate_section[rs][j];
2772 			pwr_a = hal->tx_pwr_tbl[RF_PATH_A][rate];
2773 			pwr_b = hal->tx_pwr_tbl[RF_PATH_B][rate];
2774 			if (rs == 0) {
2775 				diff_a = (s8)pwr_a - (s8)pwr_ref_cck[0];
2776 				diff_b = (s8)pwr_b - (s8)pwr_ref_cck[1];
2777 			} else {
2778 				diff_a = (s8)pwr_a - (s8)pwr_ref_ofdm[0];
2779 				diff_b = (s8)pwr_b - (s8)pwr_ref_ofdm[1];
2780 			}
2781 			diff_idx[rate % 4] = min(diff_a, diff_b);
2782 			if (rate % 4 == 3)
2783 				rtw8822c_set_tx_power_diff(rtwdev, rate - 3,
2784 							   diff_idx);
2785 		}
2786 	}
2787 }
2788 
2789 static int rtw8822c_set_antenna(struct rtw_dev *rtwdev,
2790 				u32 antenna_tx,
2791 				u32 antenna_rx)
2792 {
2793 	struct rtw_hal *hal = &rtwdev->hal;
2794 
2795 	switch (antenna_tx) {
2796 	case BB_PATH_A:
2797 	case BB_PATH_B:
2798 	case BB_PATH_AB:
2799 		break;
2800 	default:
2801 		rtw_warn(rtwdev, "unsupported tx path 0x%x\n", antenna_tx);
2802 		return -EINVAL;
2803 	}
2804 
2805 	/* RX on path B alone is not supported */
2806 	switch (antenna_rx) {
2807 	case BB_PATH_A:
2808 	case BB_PATH_AB:
2809 		break;
2810 	default:
2811 		rtw_warn(rtwdev, "unsupported rx path 0x%x\n", antenna_rx);
2812 		return -EINVAL;
2813 	}
2814 
2815 	hal->antenna_tx = antenna_tx;
2816 	hal->antenna_rx = antenna_rx;
2817 
2818 	rtw8822c_config_trx_mode(rtwdev, antenna_tx, antenna_rx, false);
2819 
2820 	return 0;
2821 }
2822 
2823 static void rtw8822c_cfg_ldo25(struct rtw_dev *rtwdev, bool enable)
2824 {
2825 	u8 ldo_pwr;
2826 
2827 	ldo_pwr = rtw_read8(rtwdev, REG_ANAPARLDO_POW_MAC);
2828 	ldo_pwr = enable ? ldo_pwr | BIT_LDOE25_PON : ldo_pwr & ~BIT_LDOE25_PON;
2829 	rtw_write8(rtwdev, REG_ANAPARLDO_POW_MAC, ldo_pwr);
2830 }
2831 
2832 static void rtw8822c_false_alarm_statistics(struct rtw_dev *rtwdev)
2833 {
2834 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2835 	u32 cck_enable;
2836 	u32 cck_fa_cnt;
2837 	u32 crc32_cnt;
2838 	u32 cca32_cnt;
2839 	u32 ofdm_fa_cnt;
2840 	u32 ofdm_fa_cnt1, ofdm_fa_cnt2, ofdm_fa_cnt3, ofdm_fa_cnt4, ofdm_fa_cnt5;
2841 	u16 parity_fail, rate_illegal, crc8_fail, mcs_fail, sb_search_fail,
2842 	    fast_fsync, crc8_fail_vhta, mcs_fail_vht;
2843 
2844 	cck_enable = rtw_read32(rtwdev, REG_ENCCK) & BIT_CCK_BLK_EN;
2845 	cck_fa_cnt = rtw_read16(rtwdev, REG_CCK_FACNT);
2846 
2847 	ofdm_fa_cnt1 = rtw_read32(rtwdev, REG_OFDM_FACNT1);
2848 	ofdm_fa_cnt2 = rtw_read32(rtwdev, REG_OFDM_FACNT2);
2849 	ofdm_fa_cnt3 = rtw_read32(rtwdev, REG_OFDM_FACNT3);
2850 	ofdm_fa_cnt4 = rtw_read32(rtwdev, REG_OFDM_FACNT4);
2851 	ofdm_fa_cnt5 = rtw_read32(rtwdev, REG_OFDM_FACNT5);
2852 
2853 	parity_fail	= FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt1);
2854 	rate_illegal	= FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt2);
2855 	crc8_fail	= FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt2);
2856 	crc8_fail_vhta	= FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt3);
2857 	mcs_fail	= FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt4);
2858 	mcs_fail_vht	= FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt4);
2859 	fast_fsync	= FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt5);
2860 	sb_search_fail	= FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt5);
2861 
2862 	ofdm_fa_cnt = parity_fail + rate_illegal + crc8_fail + crc8_fail_vhta +
2863 		      mcs_fail + mcs_fail_vht + fast_fsync + sb_search_fail;
2864 
2865 	dm_info->cck_fa_cnt = cck_fa_cnt;
2866 	dm_info->ofdm_fa_cnt = ofdm_fa_cnt;
2867 	dm_info->total_fa_cnt = ofdm_fa_cnt;
2868 	dm_info->total_fa_cnt += cck_enable ? cck_fa_cnt : 0;
2869 
2870 	crc32_cnt = rtw_read32(rtwdev, 0x2c04);
2871 	dm_info->cck_ok_cnt = crc32_cnt & 0xffff;
2872 	dm_info->cck_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2873 	crc32_cnt = rtw_read32(rtwdev, 0x2c14);
2874 	dm_info->ofdm_ok_cnt = crc32_cnt & 0xffff;
2875 	dm_info->ofdm_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2876 	crc32_cnt = rtw_read32(rtwdev, 0x2c10);
2877 	dm_info->ht_ok_cnt = crc32_cnt & 0xffff;
2878 	dm_info->ht_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2879 	crc32_cnt = rtw_read32(rtwdev, 0x2c0c);
2880 	dm_info->vht_ok_cnt = crc32_cnt & 0xffff;
2881 	dm_info->vht_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2882 
2883 	cca32_cnt = rtw_read32(rtwdev, 0x2c08);
2884 	dm_info->ofdm_cca_cnt = ((cca32_cnt & 0xffff0000) >> 16);
2885 	dm_info->cck_cca_cnt = cca32_cnt & 0xffff;
2886 	dm_info->total_cca_cnt = dm_info->ofdm_cca_cnt;
2887 	if (cck_enable)
2888 		dm_info->total_cca_cnt += dm_info->cck_cca_cnt;
2889 
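	/* Reset the CCK and OFDM false-alarm counters by toggling their
	 * reset bits.
	 */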
2890 	rtw_write32_mask(rtwdev, REG_CCANRX, BIT_CCK_FA_RST, 0);
2891 	rtw_write32_mask(rtwdev, REG_CCANRX, BIT_CCK_FA_RST, 2);
2892 	rtw_write32_mask(rtwdev, REG_CCANRX, BIT_OFDM_FA_RST, 0);
2893 	rtw_write32_mask(rtwdev, REG_CCANRX, BIT_OFDM_FA_RST, 2);
2894 
2895 	/* disable rx clk gating to reset counters */
2896 	rtw_write32_clr(rtwdev, REG_RX_BREAK, BIT_COM_RX_GCK_EN);
2897 	rtw_write32_set(rtwdev, REG_CNT_CTRL, BIT_ALL_CNT_RST);
2898 	rtw_write32_clr(rtwdev, REG_CNT_CTRL, BIT_ALL_CNT_RST);
2899 	rtw_write32_set(rtwdev, REG_RX_BREAK, BIT_COM_RX_GCK_EN);
2900 }
2901 
2902 static void rtw8822c_do_lck(struct rtw_dev *rtwdev)
2903 {
2904 	u32 val;
2905 
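	/* Kick the synthesizer AAC calibration on path A, poll until the AAC
	 * status bit no longer reads 1, then pulse the fast-lock engine to
	 * complete the LCK sequence.
	 */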
2906 	rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_CTRL, RFREG_MASK, 0x80010);
2907 	rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_PFD, RFREG_MASK, 0x1F0FA);
2908 	fsleep(1);
2909 	rtw_write_rf(rtwdev, RF_PATH_A, RF_AAC_CTRL, RFREG_MASK, 0x80000);
2910 	rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_AAC, RFREG_MASK, 0x80001);
2911 	read_poll_timeout(rtw_read_rf, val, val != 0x1, 1000, 100000,
2912 			  true, rtwdev, RF_PATH_A, RF_AAC_CTRL, 0x1000);
2913 	rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_PFD, RFREG_MASK, 0x1F0F8);
2914 	rtw_write_rf(rtwdev, RF_PATH_B, RF_SYN_CTRL, RFREG_MASK, 0x80010);
2915 
2916 	rtw_write_rf(rtwdev, RF_PATH_A, RF_FAST_LCK, RFREG_MASK, 0x0f000);
2917 	rtw_write_rf(rtwdev, RF_PATH_A, RF_FAST_LCK, RFREG_MASK, 0x4f000);
2918 	fsleep(1);
2919 	rtw_write_rf(rtwdev, RF_PATH_A, RF_FAST_LCK, RFREG_MASK, 0x0f000);
2920 }
2921 
2922 static void rtw8822c_do_iqk(struct rtw_dev *rtwdev)
2923 {
2924 	struct rtw_iqk_para para = {0};
2925 	u8 iqk_chk;
2926 	int ret;
2927 
2928 	para.clear = 1;
2929 	rtw_fw_do_iqk(rtwdev, &para);
2930 
2931 	ret = read_poll_timeout(rtw_read8, iqk_chk, iqk_chk == IQK_DONE_8822C,
2932 				20000, 300000, false, rtwdev, REG_RPT_CIP);
2933 	if (ret)
2934 		rtw_warn(rtwdev, "failed to poll iqk status bit\n");
2935 
2936 	rtw_write8(rtwdev, REG_IQKSTAT, 0x0);
2937 }
2938 
2939 /* for coex */
2940 static void rtw8822c_coex_cfg_init(struct rtw_dev *rtwdev)
2941 {
2942 	/* enable TBTT interrupt */
2943 	rtw_write8_set(rtwdev, REG_BCN_CTRL, BIT_EN_BCN_FUNCTION);
2944 
2945 	/* BT report packet sample rate */
2946 	/* 0x790[5:0]=0x5 */
2947 	rtw_write8_mask(rtwdev, REG_BT_TDMA_TIME, BIT_MASK_SAMPLE_RATE, 0x5);
2948 
2949 	/* enable BT counter statistics */
2950 	rtw_write8(rtwdev, REG_BT_STAT_CTRL, 0x1);
2951 
2952 	/* enable PTA (3-wire function from BT side) */
2953 	rtw_write32_set(rtwdev, REG_GPIO_MUXCFG, BIT_BT_PTA_EN);
2954 	rtw_write32_set(rtwdev, REG_GPIO_MUXCFG, BIT_PO_BT_PTA_PINS);
2955 
2956 	/* enable PTA (tx/rx signal from WiFi side) */
2957 	rtw_write8_set(rtwdev, REG_QUEUE_CTRL, BIT_PTA_WL_TX_EN);
2958 	/* wl tx signal to PTA is not gated by EDCCA */
2959 	rtw_write8_clr(rtwdev, REG_QUEUE_CTRL, BIT_PTA_EDCCA_EN);
2960 	/* GNT_BT=1 while select both */
2961 	rtw_write16_set(rtwdev, REG_BT_COEX_V2, BIT_GNT_BT_POLARITY);
2962 	/* BT_CCA = ~GNT_WL_BB, not or GNT_BT_BB, LTE_Rx */
2963 	rtw_write8_clr(rtwdev, REG_DUMMY_PAGE4_V1, BIT_BTCCA_CTRL);
2964 
2965 	/* to avoid RF parameter error */
2966 	rtw_write_rf(rtwdev, RF_PATH_B, RF_MODOPT, 0xfffff, 0x40000);
2967 }
2968 
2969 static void rtw8822c_coex_cfg_gnt_fix(struct rtw_dev *rtwdev)
2970 {
2971 	struct rtw_coex *coex = &rtwdev->coex;
2972 	struct rtw_coex_stat *coex_stat = &coex->stat;
2973 	struct rtw_efuse *efuse = &rtwdev->efuse;
2974 	u32 rf_0x1;
2975 
2976 	if (coex_stat->gnt_workaround_state == coex_stat->wl_coex_mode)
2977 		return;
2978 
2979 	coex_stat->gnt_workaround_state = coex_stat->wl_coex_mode;
2980 
2981 	if ((coex_stat->kt_ver == 0 && coex->under_5g) || coex->freerun)
2982 		rf_0x1 = 0x40021;
2983 	else
2984 		rf_0x1 = 0x40000;
2985 
2986 	/* BT at S1 for Shared-Ant */
2987 	if (efuse->share_ant)
2988 		rf_0x1 |= BIT(13);
2989 
2990 	rtw_write_rf(rtwdev, RF_PATH_B, 0x1, 0xfffff, rf_0x1);
2991 
2992 	/* WL-S0 2G RF TRX cannot be masked by GNT_BT
2993 	 * enable "WLS0 BB change RF mode if GNT_BT = 1" for shared-antenna type
2994 	 * disable:0x1860[3] = 1, enable:0x1860[3] = 0
2995 	 *
2996 	 * enable "DAC off if GNT_WL = 0" for non-shared-antenna
2997 	 * disable 0x1c30[22] = 0,
2998 	 * enable: 0x1c30[22] = 1, 0x1c38[12] = 0, 0x1c38[28] = 1
2999 	 */
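	/* Illustration (editorial note, not part of the vendor comment above):
	 * the byte-wise rtw_write8_mask() calls below reach a single bit of a
	 * 32-bit BB register by pairing a byte offset with a shifted mask.
	 * For instance, bit 22 of a register is byte offset +2 with the bit
	 * mask shifted right by 16, which is how "REG_ANAPAR + 2,
	 * BIT_ANAPAR_BTPS >> 16" corresponds to the 0x1c30[22] notation used
	 * above (assuming BIT_ANAPAR_BTPS names bit 22 of REG_ANAPAR).
	 */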
3000 	if (coex_stat->wl_coex_mode == COEX_WLINK_2GFREE) {
3001 		rtw_write8_mask(rtwdev, REG_ANAPAR + 2,
3002 				BIT_ANAPAR_BTPS >> 16, 0);
3003 	} else {
3004 		rtw_write8_mask(rtwdev, REG_ANAPAR + 2,
3005 				BIT_ANAPAR_BTPS >> 16, 1);
3006 		rtw_write8_mask(rtwdev, REG_RSTB_SEL + 1,
3007 				BIT_DAC_OFF_ENABLE, 0);
3008 		rtw_write8_mask(rtwdev, REG_RSTB_SEL + 3,
3009 				BIT_DAC_OFF_ENABLE, 1);
3010 	}
3011 
3012 	/* disable WL-S1 BB change RF mode if GNT_BT
3013 	 * since RF TRx mask can do it
3014 	 */
3015 	rtw_write8_mask(rtwdev, REG_IGN_GNTBT4,
3016 			BIT_PI_IGNORE_GNT_BT, 1);
3017 
3018 	/* disable WL-S0 BB change RF mode if wifi is at 5G,
3019 	 * or antenna path is separated
3020 	 */
3021 	if (coex_stat->wl_coex_mode == COEX_WLINK_2GFREE) {
3022 		rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3023 				BIT_PI_IGNORE_GNT_BT, 1);
3024 		rtw_write8_mask(rtwdev, REG_NOMASK_TXBT,
3025 				BIT_NOMASK_TXBT_ENABLE, 1);
3026 	} else if (coex_stat->wl_coex_mode == COEX_WLINK_5G ||
3027 	    coex->under_5g || !efuse->share_ant) {
3028 		if (coex_stat->kt_ver >= 3) {
3029 			rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3030 					BIT_PI_IGNORE_GNT_BT, 0);
3031 			rtw_write8_mask(rtwdev, REG_NOMASK_TXBT,
3032 					BIT_NOMASK_TXBT_ENABLE, 1);
3033 		} else {
3034 			rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3035 					BIT_PI_IGNORE_GNT_BT, 1);
3036 		}
3037 	} else {
3038 		/* shared-antenna */
3039 		rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3040 				BIT_PI_IGNORE_GNT_BT, 0);
3041 		if (coex_stat->kt_ver >= 3) {
3042 			rtw_write8_mask(rtwdev, REG_NOMASK_TXBT,
3043 					BIT_NOMASK_TXBT_ENABLE, 0);
3044 		}
3045 	}
3046 }
3047 
3048 static void rtw8822c_coex_cfg_gnt_debug(struct rtw_dev *rtwdev)
3049 {
3050 	rtw_write8_mask(rtwdev, REG_PAD_CTRL1 + 2, BIT_BTGP_SPI_EN >> 16, 0);
3051 	rtw_write8_mask(rtwdev, REG_PAD_CTRL1 + 3, BIT_BTGP_JTAG_EN >> 24, 0);
3052 	rtw_write8_mask(rtwdev, REG_GPIO_MUXCFG + 2, BIT_FSPI_EN >> 16, 0);
3053 	rtw_write8_mask(rtwdev, REG_PAD_CTRL1 + 1, BIT_LED1DIS >> 8, 0);
3054 	rtw_write8_mask(rtwdev, REG_SYS_SDIO_CTRL + 3, BIT_DBG_GNT_WL_BT >> 24, 0);
3055 }
3056 
3057 static void rtw8822c_coex_cfg_rfe_type(struct rtw_dev *rtwdev)
3058 {
3059 	struct rtw_coex *coex = &rtwdev->coex;
3060 	struct rtw_coex_rfe *coex_rfe = &coex->rfe;
3061 	struct rtw_efuse *efuse = &rtwdev->efuse;
3062 
3063 	coex_rfe->rfe_module_type = rtwdev->efuse.rfe_option;
3064 	coex_rfe->ant_switch_polarity = 0;
3065 	coex_rfe->ant_switch_exist = false;
3066 	coex_rfe->ant_switch_with_bt = false;
3067 	coex_rfe->ant_switch_diversity = false;
3068 
3069 	if (efuse->share_ant)
3070 		coex_rfe->wlg_at_btg = true;
3071 	else
3072 		coex_rfe->wlg_at_btg = false;
3073 
3074 	/* disable LTE coex on the WiFi side */
3075 	rtw_coex_write_indirect_reg(rtwdev, LTE_COEX_CTRL, BIT_LTE_COEX_EN, 0x0);
3076 	rtw_coex_write_indirect_reg(rtwdev, LTE_WL_TRX_CTRL, MASKLWORD, 0xffff);
3077 	rtw_coex_write_indirect_reg(rtwdev, LTE_BT_TRX_CTRL, MASKLWORD, 0xffff);
3078 }
3079 
3080 static void rtw8822c_coex_cfg_wl_tx_power(struct rtw_dev *rtwdev, u8 wl_pwr)
3081 {
3082 	struct rtw_coex *coex = &rtwdev->coex;
3083 	struct rtw_coex_dm *coex_dm = &coex->dm;
3084 
3085 	if (wl_pwr == coex_dm->cur_wl_pwr_lvl)
3086 		return;
3087 
3088 	coex_dm->cur_wl_pwr_lvl = wl_pwr;
3089 }
3090 
3091 static void rtw8822c_coex_cfg_wl_rx_gain(struct rtw_dev *rtwdev, bool low_gain)
3092 {
3093 	struct rtw_coex *coex = &rtwdev->coex;
3094 	struct rtw_coex_dm *coex_dm = &coex->dm;
3095 
3096 	if (low_gain == coex_dm->cur_wl_rx_low_gain_en)
3097 		return;
3098 
3099 	coex_dm->cur_wl_rx_low_gain_en = low_gain;
3100 
3101 	if (coex_dm->cur_wl_rx_low_gain_en) {
3102 		rtw_dbg(rtwdev, RTW_DBG_COEX, "[BTCoex], Hi-Li Table On!\n");
3103 
3104 		/* set Rx filter corner RCK offset */
3105 		rtw_write_rf(rtwdev, RF_PATH_A, RF_RCKD, RFREG_MASK, 0x22);
3106 		rtw_write_rf(rtwdev, RF_PATH_A, RF_RCK, RFREG_MASK, 0x36);
3107 		rtw_write_rf(rtwdev, RF_PATH_B, RF_RCKD, RFREG_MASK, 0x22);
3108 		rtw_write_rf(rtwdev, RF_PATH_B, RF_RCK, RFREG_MASK, 0x36);
3109 
3110 	} else {
3111 		rtw_dbg(rtwdev, RTW_DBG_COEX, "[BTCoex], Hi-Li Table Off!\n");
3112 
3113 		/* set Rx filter corner RCK offset */
3114 		rtw_write_rf(rtwdev, RF_PATH_A, RF_RCKD, RFREG_MASK, 0x20);
3115 		rtw_write_rf(rtwdev, RF_PATH_A, RF_RCK, RFREG_MASK, 0x0);
3116 		rtw_write_rf(rtwdev, RF_PATH_B, RF_RCKD, RFREG_MASK, 0x20);
3117 		rtw_write_rf(rtwdev, RF_PATH_B, RF_RCK, RFREG_MASK, 0x0);
3118 	}
3119 }
3120 
3121 static void rtw8822c_bf_enable_bfee_su(struct rtw_dev *rtwdev,
3122 				       struct rtw_vif *vif,
3123 				       struct rtw_bfee *bfee)
3124 {
3125 	u8 csi_rsc = 0;
3126 	u32 tmp6dc;
3127 
3128 	rtw_bf_enable_bfee_su(rtwdev, vif, bfee);
3129 
3130 	tmp6dc = rtw_read32(rtwdev, REG_BBPSF_CTRL) |
3131 			    BIT_WMAC_USE_NDPARATE |
3132 			    (csi_rsc << 13);
3133 	if (vif->net_type == RTW_NET_AP_MODE)
3134 		rtw_write32(rtwdev, REG_BBPSF_CTRL, tmp6dc | BIT(12));
3135 	else
3136 		rtw_write32(rtwdev, REG_BBPSF_CTRL, tmp6dc & ~BIT(12));
3137 
3138 	rtw_write32(rtwdev, REG_CSI_RRSR, 0x550);
3139 }
3140 
3141 static void rtw8822c_bf_config_bfee_su(struct rtw_dev *rtwdev,
3142 				       struct rtw_vif *vif,
3143 				       struct rtw_bfee *bfee, bool enable)
3144 {
3145 	if (enable)
3146 		rtw8822c_bf_enable_bfee_su(rtwdev, vif, bfee);
3147 	else
3148 		rtw_bf_remove_bfee_su(rtwdev, bfee);
3149 }
3150 
3151 static void rtw8822c_bf_config_bfee_mu(struct rtw_dev *rtwdev,
3152 				       struct rtw_vif *vif,
3153 				       struct rtw_bfee *bfee, bool enable)
3154 {
3155 	if (enable)
3156 		rtw_bf_enable_bfee_mu(rtwdev, vif, bfee);
3157 	else
3158 		rtw_bf_remove_bfee_mu(rtwdev, bfee);
3159 }
3160 
3161 static void rtw8822c_bf_config_bfee(struct rtw_dev *rtwdev, struct rtw_vif *vif,
3162 				    struct rtw_bfee *bfee, bool enable)
3163 {
3164 	if (bfee->role == RTW_BFEE_SU)
3165 		rtw8822c_bf_config_bfee_su(rtwdev, vif, bfee, enable);
3166 	else if (bfee->role == RTW_BFEE_MU)
3167 		rtw8822c_bf_config_bfee_mu(rtwdev, vif, bfee, enable);
3168 	else
3169 		rtw_warn(rtwdev, "wrong bfee role\n");
3170 }
3171 
3172 struct dpk_cfg_pair {
3173 	u32 addr;
3174 	u32 bitmask;
3175 	u32 data;
3176 };
3177 
3178 void rtw8822c_parse_tbl_dpk(struct rtw_dev *rtwdev,
3179 			    const struct rtw_table *tbl)
3180 {
3181 	const struct dpk_cfg_pair *p = tbl->data;
3182 	const struct dpk_cfg_pair *end = p + tbl->size / 3;
3183 
3184 	BUILD_BUG_ON(sizeof(struct dpk_cfg_pair) != sizeof(u32) * 3);
3185 
3186 	for (; p < end; p++)
3187 		rtw_write32_mask(rtwdev, p->addr, p->bitmask, p->data);
3188 }
3189 
3190 static void rtw8822c_dpk_set_gnt_wl(struct rtw_dev *rtwdev, bool is_before_k)
3191 {
3192 	struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3193 
3194 	if (is_before_k) {
3195 		dpk_info->gnt_control = rtw_read32(rtwdev, 0x70);
3196 		dpk_info->gnt_value = rtw_coex_read_indirect_reg(rtwdev, 0x38);
3197 		rtw_write32_mask(rtwdev, 0x70, BIT(26), 0x1);
3198 		rtw_coex_write_indirect_reg(rtwdev, 0x38, MASKBYTE1, 0x77);
3199 	} else {
3200 		rtw_coex_write_indirect_reg(rtwdev, 0x38, MASKDWORD,
3201 					    dpk_info->gnt_value);
3202 		rtw_write32(rtwdev, 0x70, dpk_info->gnt_control);
3203 	}
3204 }
3205 
3206 static void
3207 rtw8822c_dpk_restore_registers(struct rtw_dev *rtwdev, u32 reg_num,
3208 			       struct rtw_backup_info *bckp)
3209 {
3210 	rtw_restore_reg(rtwdev, bckp, reg_num);
3211 	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3212 	rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_DPD_CLK, 0x4);
3213 }
3214 
3215 static void
3216 rtw8822c_dpk_backup_registers(struct rtw_dev *rtwdev, u32 *reg,
3217 			      u32 reg_num, struct rtw_backup_info *bckp)
3218 {
3219 	u32 i;
3220 
3221 	for (i = 0; i < reg_num; i++) {
3222 		bckp[i].len = 4;
3223 		bckp[i].reg = reg[i];
3224 		bckp[i].val = rtw_read32(rtwdev, reg[i]);
3225 	}
3226 }
3227 
3228 static void rtw8822c_dpk_backup_rf_registers(struct rtw_dev *rtwdev,
3229 					     u32 *rf_reg,
3230 					     u32 rf_reg_bak[][2])
3231 {
3232 	u32 i;
3233 
3234 	for (i = 0; i < DPK_RF_REG_NUM; i++) {
3235 		rf_reg_bak[i][RF_PATH_A] = rtw_read_rf(rtwdev, RF_PATH_A,
3236 						       rf_reg[i], RFREG_MASK);
3237 		rf_reg_bak[i][RF_PATH_B] = rtw_read_rf(rtwdev, RF_PATH_B,
3238 						       rf_reg[i], RFREG_MASK);
3239 	}
3240 }
3241 
3242 static void rtw8822c_dpk_reload_rf_registers(struct rtw_dev *rtwdev,
3243 					     u32 *rf_reg,
3244 					     u32 rf_reg_bak[][2])
3245 {
3246 	u32 i;
3247 
3248 	for (i = 0; i < DPK_RF_REG_NUM; i++) {
3249 		rtw_write_rf(rtwdev, RF_PATH_A, rf_reg[i], RFREG_MASK,
3250 			     rf_reg_bak[i][RF_PATH_A]);
3251 		rtw_write_rf(rtwdev, RF_PATH_B, rf_reg[i], RFREG_MASK,
3252 			     rf_reg_bak[i][RF_PATH_B]);
3253 	}
3254 }
3255 
3256 static void rtw8822c_dpk_information(struct rtw_dev *rtwdev)
3257 {
3258 	struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3259 	u32  reg;
3260 	u8 band_shift;
3261 
3262 	reg = rtw_read_rf(rtwdev, RF_PATH_A, 0x18, RFREG_MASK);
3263 
3264 	band_shift = FIELD_GET(BIT(16), reg);
3265 	dpk_info->dpk_band = 1 << band_shift;
3266 	dpk_info->dpk_ch = FIELD_GET(0xff, reg);
3267 	dpk_info->dpk_bw = FIELD_GET(0x3000, reg);
3268 }
3269 
3270 static void rtw8822c_dpk_rxbb_dc_cal(struct rtw_dev *rtwdev, u8 path)
3271 {
3272 	rtw_write_rf(rtwdev, path, 0x92, RFREG_MASK, 0x84800);
3273 	udelay(5);
3274 	rtw_write_rf(rtwdev, path, 0x92, RFREG_MASK, 0x84801);
3275 	usleep_range(600, 610);
3276 	rtw_write_rf(rtwdev, path, 0x92, RFREG_MASK, 0x84800);
3277 }
3278 
3279 static u8 rtw8822c_dpk_dc_corr_check(struct rtw_dev *rtwdev, u8 path)
3280 {
3281 	u16 dc_i, dc_q;
3282 	u8 corr_idx;
3283 
3284 	rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000900f0);
3285 	dc_i = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(27, 16));
3286 	dc_q = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(11, 0));
3287 
3288 	if (dc_i & BIT(11))
3289 		dc_i = 0x1000 - dc_i;
3290 	if (dc_q & BIT(11))
3291 		dc_q = 0x1000 - dc_q;
3292 
3293 	rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000000f0);
3294 	corr_idx = (u8)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(7, 0));
3295 	rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(15, 8));
3296 
3297 	if (dc_i > 200 || dc_q > 200 || corr_idx < 40 || corr_idx > 65)
3298 		return 1;
3299 	else
3300 		return 0;
3301 
3302 }
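/* Worked example for the DC check above (illustrative values): dc_i/dc_q are
 * 12-bit two's-complement report fields, so a raw reading of 0xf38 (-200)
 * becomes 0x1000 - 0xf38 = 200, i.e. the DC magnitude in codes. The helper
 * reports failure (1) when either DC magnitude exceeds 200 or the correlation
 * index falls outside the 40..65 window, which triggers one retry of the
 * RX BB DC calibration in rtw8822c_dpk_gainloss().
 */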
3303 
3304 static void rtw8822c_dpk_tx_pause(struct rtw_dev *rtwdev)
3305 {
3306 	u8 reg_a, reg_b;
3307 	u16 count = 0;
3308 
3309 	rtw_write8(rtwdev, 0x522, 0xff);
3310 	rtw_write32_mask(rtwdev, 0x1e70, 0xf, 0x2);
3311 
3312 	do {
3313 		reg_a = (u8)rtw_read_rf(rtwdev, RF_PATH_A, 0x00, 0xf0000);
3314 		reg_b = (u8)rtw_read_rf(rtwdev, RF_PATH_B, 0x00, 0xf0000);
3315 		udelay(2);
3316 		count++;
3317 	} while ((reg_a == 2 || reg_b == 2) && count < 2500);
3318 }
3319 
3320 static void rtw8822c_dpk_mac_bb_setting(struct rtw_dev *rtwdev)
3321 {
3322 	rtw8822c_dpk_tx_pause(rtwdev);
3323 	rtw_load_table(rtwdev, &rtw8822c_dpk_mac_bb_tbl);
3324 }
3325 
3326 static void rtw8822c_dpk_afe_setting(struct rtw_dev *rtwdev, bool is_do_dpk)
3327 {
3328 	if (is_do_dpk)
3329 		rtw_load_table(rtwdev, &rtw8822c_dpk_afe_is_dpk_tbl);
3330 	else
3331 		rtw_load_table(rtwdev, &rtw8822c_dpk_afe_no_dpk_tbl);
3332 }
3333 
3334 static void rtw8822c_dpk_pre_setting(struct rtw_dev *rtwdev)
3335 {
3336 	u8 path;
3337 
3338 	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
3339 		rtw_write_rf(rtwdev, path, RF_RXAGC_OFFSET, RFREG_MASK, 0x0);
3340 		rtw_write32(rtwdev, REG_NCTL0, 0x8 | (path << 1));
3341 		if (rtwdev->dm_info.dpk_info.dpk_band == RTW_BAND_2G)
3342 			rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f100000);
3343 		else
3344 			rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f0d0000);
3345 		rtw_write32_mask(rtwdev, REG_DPD_LUT0, BIT_GLOSS_DB, 0x4);
3346 		rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x3);
3347 	}
3348 	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3349 	rtw_write32(rtwdev, REG_DPD_CTL11, 0x3b23170b);
3350 	rtw_write32(rtwdev, REG_DPD_CTL12, 0x775f5347);
3351 }
3352 
3353 static u32 rtw8822c_dpk_rf_setting(struct rtw_dev *rtwdev, u8 path)
3354 {
3355 	u32 ori_txbb;
3356 
3357 	rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x50017);
3358 	ori_txbb = rtw_read_rf(rtwdev, path, RF_TX_GAIN, RFREG_MASK);
3359 
3360 	rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x1);
3361 	rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_PWR_TRIM, 0x1);
3362 	rtw_write_rf(rtwdev, path, RF_TX_GAIN_OFFSET, BIT_BB_GAIN, 0x0);
3363 	rtw_write_rf(rtwdev, path, RF_TX_GAIN, RFREG_MASK, ori_txbb);
3364 
3365 	if (rtwdev->dm_info.dpk_info.dpk_band == RTW_BAND_2G) {
3366 		rtw_write_rf(rtwdev, path, RF_TX_GAIN_OFFSET, BIT_RF_GAIN, 0x1);
3367 		rtw_write_rf(rtwdev, path, RF_RXG_GAIN, BIT_RXG_GAIN, 0x0);
3368 	} else {
3369 		rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_TXA_LB_ATT, 0x0);
3370 		rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_ATT, 0x6);
3371 		rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_SW, 0x1);
3372 		rtw_write_rf(rtwdev, path, RF_RXA_MIX_GAIN, BIT_RXA_MIX_GAIN, 0);
3373 	}
3374 
3375 	rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RXAGC, 0xf);
3376 	rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x1);
3377 	rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_RXBB, 0x0);
3378 
3379 	if (rtwdev->dm_info.dpk_info.dpk_bw == DPK_CHANNEL_WIDTH_80)
3380 		rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_TXBB, 0x2);
3381 	else
3382 		rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_TXBB, 0x1);
3383 
3384 	rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT(1), 0x1);
3385 
3386 	usleep_range(100, 110);
3387 
3388 	return ori_txbb & 0x1f;
3389 }
3390 
3391 static u16 rtw8822c_dpk_get_cmd(struct rtw_dev *rtwdev, u8 action, u8 path)
3392 {
3393 	u16 cmd;
3394 	u8 bw = rtwdev->dm_info.dpk_info.dpk_bw == DPK_CHANNEL_WIDTH_80 ? 2 : 0;
3395 
3396 	switch (action) {
3397 	case RTW_DPK_GAIN_LOSS:
3398 		cmd = 0x14 + path;
3399 		break;
3400 	case RTW_DPK_DO_DPK:
3401 		cmd = 0x16 + path + bw;
3402 		break;
3403 	case RTW_DPK_DPK_ON:
3404 		cmd = 0x1a + path;
3405 		break;
3406 	case RTW_DPK_DAGC:
3407 		cmd = 0x1c + path + bw;
3408 		break;
3409 	default:
3410 		return 0;
3411 	}
3412 
3413 	return (cmd << 8) | 0x48;
3414 }
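/* Worked example (illustrative): RTW_DPK_DO_DPK on path B with an 80 MHz
 * DPK bandwidth gives cmd = 0x16 + 1 + 2 = 0x19, so the function returns
 * (0x19 << 8) | 0x48 = 0x1948. rtw8822c_dpk_one_shot() then writes this value
 * followed by value + 1 to REG_NCTL0 to kick the one-shot command.
 */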
3415 
3416 static u8 rtw8822c_dpk_one_shot(struct rtw_dev *rtwdev, u8 path, u8 action)
3417 {
3418 	u16 dpk_cmd;
3419 	u8 result = 0;
3420 
3421 	rtw8822c_dpk_set_gnt_wl(rtwdev, true);
3422 
3423 	if (action == RTW_DPK_CAL_PWR) {
3424 		rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(12), 0x1);
3425 		rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(12), 0x0);
3426 		rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_SEL, 0x0);
3427 		msleep(10);
3428 		if (!check_hw_ready(rtwdev, REG_STAT_RPT, BIT(31), 0x1)) {
3429 			result = 1;
3430 			rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] one-shot over 20ms\n");
3431 		}
3432 	} else {
3433 		rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
3434 				 0x8 | (path << 1));
3435 		rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x9);
3436 
3437 		dpk_cmd = rtw8822c_dpk_get_cmd(rtwdev, action, path);
3438 		rtw_write32(rtwdev, REG_NCTL0, dpk_cmd);
3439 		rtw_write32(rtwdev, REG_NCTL0, dpk_cmd + 1);
3440 		msleep(10);
3441 		if (!check_hw_ready(rtwdev, 0x2d9c, 0xff, 0x55)) {
3442 			result = 1;
3443 			rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] one-shot over 20ms\n");
3444 		}
3445 		rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
3446 				 0x8 | (path << 1));
3447 		rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x0);
3448 	}
3449 
3450 	rtw8822c_dpk_set_gnt_wl(rtwdev, false);
3451 
3452 	rtw_write8(rtwdev, 0x1b10, 0x0);
3453 
3454 	return result;
3455 }
3456 
3457 static u16 rtw8822c_dpk_dgain_read(struct rtw_dev *rtwdev, u8 path)
3458 {
3459 	u16 dgain;
3460 
3461 	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3462 	rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, 0x00ff0000, 0x0);
3463 
3464 	dgain = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(27, 16));
3465 
3466 	return dgain;
3467 }
3468 
3469 static u8 rtw8822c_dpk_thermal_read(struct rtw_dev *rtwdev, u8 path)
3470 {
3471 	rtw_write_rf(rtwdev, path, RF_T_METER, BIT(19), 0x1);
3472 	rtw_write_rf(rtwdev, path, RF_T_METER, BIT(19), 0x0);
3473 	rtw_write_rf(rtwdev, path, RF_T_METER, BIT(19), 0x1);
3474 	udelay(15);
3475 
3476 	return (u8)rtw_read_rf(rtwdev, path, RF_T_METER, 0x0007e);
3477 }
3478 
3479 static u32 rtw8822c_dpk_pas_read(struct rtw_dev *rtwdev, u8 path)
3480 {
3481 	u32 i_val, q_val;
3482 
3483 	rtw_write32(rtwdev, REG_NCTL0, 0x8 | (path << 1));
3484 	rtw_write32_mask(rtwdev, 0x1b48, BIT(14), 0x0);
3485 	rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x00060001);
3486 	rtw_write32(rtwdev, 0x1b4c, 0x00000000);
3487 	rtw_write32(rtwdev, 0x1b4c, 0x00080000);
3488 
3489 	q_val = rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKHWORD);
3490 	i_val = rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKLWORD);
3491 
3492 	if (i_val & BIT(15))
3493 		i_val = 0x10000 - i_val;
3494 	if (q_val & BIT(15))
3495 		q_val = 0x10000 - q_val;
3496 
3497 	rtw_write32(rtwdev, 0x1b4c, 0x00000000);
3498 
3499 	return i_val * i_val + q_val * q_val;
3500 }
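/* Worked example (illustrative): i_val/q_val are 16-bit two's-complement
 * loopback samples, converted to magnitudes before squaring. A raw I of 300
 * and a raw Q of 0xfe70 (-400) yield 300^2 + 400^2 = 250000, i.e. the
 * PA-scan power consumed by rtw8822c_dpk_agc_loss_chk() below.
 */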
3501 
3502 static u32 rtw8822c_psd_log2base(u32 val)
3503 {
3504 	u32 tmp, val_integerd_b, tindex;
3505 	u32 result, val_fractiond_b;
3506 	u32 table_fraction[21] = {0, 432, 332, 274, 232, 200, 174,
3507 				  151, 132, 115, 100, 86, 74, 62, 51,
3508 				  42, 32, 23, 15, 7, 0};
3509 
3510 	if (val == 0)
3511 		return 0;
3512 
3513 	val_integerd_b = __fls(val) + 1;
3514 
3515 	tmp = (val * 100) / (1 << val_integerd_b);
3516 	tindex = tmp / 5;
3517 
3518 	if (tindex >= ARRAY_SIZE(table_fraction))
3519 		tindex = ARRAY_SIZE(table_fraction) - 1;
3520 
3521 	val_fractiond_b = table_fraction[tindex];
3522 
3523 	result = val_integerd_b * 100 - val_fractiond_b;
3524 
3525 	return result;
3526 }
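/* Worked example (illustrative): rtw8822c_psd_log2base() approximates
 * 100 * log2(val) with a 21-entry fraction table. For val = 0x30000 (196608):
 * val_integerd_b = __fls(val) + 1 = 18, tmp = 19660800 / 2^18 = 75,
 * tindex = 15, table_fraction[15] = 42, so the result is 1800 - 42 = 1758,
 * close to the exact 100 * log2(196608) = 1758.5.
 */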
3527 
3528 static u8 rtw8822c_dpk_gainloss_result(struct rtw_dev *rtwdev, u8 path)
3529 {
3530 	u8 result;
3531 
3532 	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3533 	rtw_write32_mask(rtwdev, 0x1b48, BIT(14), 0x1);
3534 	rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x00060000);
3535 
3536 	result = (u8)rtw_read32_mask(rtwdev, REG_STAT_RPT, 0x000000f0);
3537 
3538 	rtw_write32_mask(rtwdev, 0x1b48, BIT(14), 0x0);
3539 
3540 	return result;
3541 }
3542 
3543 static u8 rtw8822c_dpk_agc_gain_chk(struct rtw_dev *rtwdev, u8 path,
3544 				    u8 limited_pga)
3545 {
3546 	u8 result = 0;
3547 	u16 dgain;
3548 
3549 	rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DAGC);
3550 	dgain = rtw8822c_dpk_dgain_read(rtwdev, path);
3551 
3552 	if (dgain > 1535 && !limited_pga)
3553 		return RTW_DPK_GAIN_LESS;
3554 	else if (dgain < 768 && !limited_pga)
3555 		return RTW_DPK_GAIN_LARGE;
3556 	else
3557 		return result;
3558 }
3559 
3560 static u8 rtw8822c_dpk_agc_loss_chk(struct rtw_dev *rtwdev, u8 path)
3561 {
3562 	u32 loss, loss_db;
3563 
3564 	loss = rtw8822c_dpk_pas_read(rtwdev, path);
3565 	if (loss < 0x4000000)
3566 		return RTW_DPK_GL_LESS;
3567 	loss_db = 3 * rtw8822c_psd_log2base(loss >> 13) - 3870;
3568 
3569 	if (loss_db > 1000)
3570 		return RTW_DPK_GL_LARGE;
3571 	else if (loss_db < 250)
3572 		return RTW_DPK_GL_LESS;
3573 	else
3574 		return RTW_DPK_AGC_OUT;
3575 }
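/* Worked example (illustrative): with loss = 0x8000000 the scaled value is
 * loss >> 13 = 16384, rtw8822c_psd_log2base() returns 1400, and
 * loss_db = 3 * 1400 - 3870 = 330, which falls inside the 250..1000 window
 * and therefore ends the AGC loop with RTW_DPK_AGC_OUT.
 */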
3576 
3577 struct rtw8822c_dpk_data {
3578 	u8 txbb;
3579 	u8 pga;
3580 	u8 limited_pga;
3581 	u8 agc_cnt;
3582 	bool loss_only;
3583 	bool gain_only;
3584 	u8 path;
3585 };
3586 
3587 static u8 rtw8822c_gain_check_state(struct rtw_dev *rtwdev,
3588 				    struct rtw8822c_dpk_data *data)
3589 {
3590 	u8 state;
3591 
3592 	data->txbb = (u8)rtw_read_rf(rtwdev, data->path, RF_TX_GAIN,
3593 				     BIT_GAIN_TXBB);
3594 	data->pga = (u8)rtw_read_rf(rtwdev, data->path, RF_MODE_TRXAGC,
3595 				    BIT_RXAGC);
3596 
3597 	if (data->loss_only) {
3598 		state = RTW_DPK_LOSS_CHECK;
3599 		goto check_end;
3600 	}
3601 
3602 	state = rtw8822c_dpk_agc_gain_chk(rtwdev, data->path,
3603 					  data->limited_pga);
3604 	if (state == RTW_DPK_GAIN_CHECK && data->gain_only)
3605 		state = RTW_DPK_AGC_OUT;
3606 	else if (state == RTW_DPK_GAIN_CHECK)
3607 		state = RTW_DPK_LOSS_CHECK;
3608 
3609 check_end:
3610 	data->agc_cnt++;
3611 	if (data->agc_cnt >= 6)
3612 		state = RTW_DPK_AGC_OUT;
3613 
3614 	return state;
3615 }
3616 
3617 static u8 rtw8822c_gain_large_state(struct rtw_dev *rtwdev,
3618 				    struct rtw8822c_dpk_data *data)
3619 {
3620 	u8 pga = data->pga;
3621 
3622 	if (pga > 0xe)
3623 		rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0xc);
3624 	else if (pga > 0xb && pga < 0xf)
3625 		rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0x0);
3626 	else if (pga < 0xc)
3627 		data->limited_pga = 1;
3628 
3629 	return RTW_DPK_GAIN_CHECK;
3630 }
3631 
3632 static u8 rtw8822c_gain_less_state(struct rtw_dev *rtwdev,
3633 				   struct rtw8822c_dpk_data *data)
3634 {
3635 	u8 pga = data->pga;
3636 
3637 	if (pga < 0xc)
3638 		rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0xc);
3639 	else if (pga > 0xb && pga < 0xf)
3640 		rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0xf);
3641 	else if (pga > 0xe)
3642 		data->limited_pga = 1;
3643 
3644 	return RTW_DPK_GAIN_CHECK;
3645 }
3646 
3647 static u8 rtw8822c_gl_state(struct rtw_dev *rtwdev,
3648 			    struct rtw8822c_dpk_data *data, u8 is_large)
3649 {
3650 	u8 txbb_bound[] = {0x1f, 0};
3651 
3652 	if (data->txbb == txbb_bound[is_large])
3653 		return RTW_DPK_AGC_OUT;
3654 
3655 	if (is_large == 1)
3656 		data->txbb -= 2;
3657 	else
3658 		data->txbb += 3;
3659 
3660 	rtw_write_rf(rtwdev, data->path, RF_TX_GAIN, BIT_GAIN_TXBB, data->txbb);
3661 	data->limited_pga = 0;
3662 
3663 	return RTW_DPK_GAIN_CHECK;
3664 }
3665 
3666 static u8 rtw8822c_gl_large_state(struct rtw_dev *rtwdev,
3667 				  struct rtw8822c_dpk_data *data)
3668 {
3669 	return rtw8822c_gl_state(rtwdev, data, 1);
3670 }
3671 
3672 static u8 rtw8822c_gl_less_state(struct rtw_dev *rtwdev,
3673 				 struct rtw8822c_dpk_data *data)
3674 {
3675 	return rtw8822c_gl_state(rtwdev, data, 0);
3676 }
3677 
3678 static u8 rtw8822c_loss_check_state(struct rtw_dev *rtwdev,
3679 				    struct rtw8822c_dpk_data *data)
3680 {
3681 	u8 path = data->path;
3682 	u8 state;
3683 
3684 	rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_GAIN_LOSS);
3685 	state = rtw8822c_dpk_agc_loss_chk(rtwdev, path);
3686 
3687 	return state;
3688 }
3689 
3690 static u8 (*dpk_state[])(struct rtw_dev *rtwdev,
3691 			  struct rtw8822c_dpk_data *data) = {
3692 	rtw8822c_gain_check_state, rtw8822c_gain_large_state,
3693 	rtw8822c_gain_less_state, rtw8822c_gl_large_state,
3694 	rtw8822c_gl_less_state, rtw8822c_loss_check_state };
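/* The DPK AGC loop below is a small table-driven state machine: each handler
 * returns the next state, and the array order is assumed to match the
 * RTW_DPK_* state enumerators used as indices (gain check, gain large/less,
 * gain-loss large/less, loss check). rtw8822c_dpk_pas_agc() keeps invoking
 * dpk_state[state] until a handler returns RTW_DPK_AGC_OUT.
 */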
3695 
3696 static u8 rtw8822c_dpk_pas_agc(struct rtw_dev *rtwdev, u8 path,
3697 			       bool gain_only, bool loss_only)
3698 {
3699 	struct rtw8822c_dpk_data data = {0};
3700 	u8 (*func)(struct rtw_dev *rtwdev, struct rtw8822c_dpk_data *data);
3701 	u8 state = RTW_DPK_GAIN_CHECK;
3702 
3703 	data.loss_only = loss_only;
3704 	data.gain_only = gain_only;
3705 	data.path = path;
3706 
3707 	for (;;) {
3708 		func = dpk_state[state];
3709 		state = func(rtwdev, &data);
3710 		if (state == RTW_DPK_AGC_OUT)
3711 			break;
3712 	}
3713 
3714 	return data.txbb;
3715 }
3716 
3717 static bool rtw8822c_dpk_coef_iq_check(struct rtw_dev *rtwdev,
3718 				       u16 coef_i, u16 coef_q)
3719 {
3720 	if (coef_i == 0x1000 || coef_i == 0x0fff ||
3721 	    coef_q == 0x1000 || coef_q == 0x0fff)
3722 		return true;
3723 
3724 	return false;
3725 }
3726 
3727 static u32 rtw8822c_dpk_coef_transfer(struct rtw_dev *rtwdev)
3728 {
3729 	u32 reg = 0;
3730 	u16 coef_i = 0, coef_q = 0;
3731 
3732 	reg = rtw_read32(rtwdev, REG_STAT_RPT);
3733 
3734 	coef_i = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKHWORD) & 0x1fff;
3735 	coef_q = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKLWORD) & 0x1fff;
3736 
3737 	coef_q = ((0x2000 - coef_q) & 0x1fff) - 1;
3738 
3739 	reg = (coef_i << 16) | coef_q;
3740 
3741 	return reg;
3742 }
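/* Worked example (illustrative): with a raw report of 0x0123 in the high
 * half-word and 0x0456 in the low half-word, coef_i = 0x0123 and coef_q is
 * remapped to ((0x2000 - 0x0456) & 0x1fff) - 1 = 0x1ba9, i.e. the 13-bit
 * bitwise complement of the raw Q value, and the packed result is
 * (coef_i << 16) | coef_q = 0x01231ba9.
 */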
3743 
3744 static const u32 rtw8822c_dpk_get_coef_tbl[] = {
3745 	0x000400f0, 0x040400f0, 0x080400f0, 0x010400f0, 0x050400f0,
3746 	0x090400f0, 0x020400f0, 0x060400f0, 0x0a0400f0, 0x030400f0,
3747 	0x070400f0, 0x0b0400f0, 0x0c0400f0, 0x100400f0, 0x0d0400f0,
3748 	0x110400f0, 0x0e0400f0, 0x120400f0, 0x0f0400f0, 0x130400f0,
3749 };
3750 
3751 static void rtw8822c_dpk_coef_tbl_apply(struct rtw_dev *rtwdev, u8 path)
3752 {
3753 	struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3754 	int i;
3755 
3756 	for (i = 0; i < 20; i++) {
3757 		rtw_write32(rtwdev, REG_RXSRAM_CTL,
3758 			    rtw8822c_dpk_get_coef_tbl[i]);
3759 		dpk_info->coef[path][i] = rtw8822c_dpk_coef_transfer(rtwdev);
3760 	}
3761 }
3762 
3763 static void rtw8822c_dpk_get_coef(struct rtw_dev *rtwdev, u8 path)
3764 {
3765 	rtw_write32(rtwdev, REG_NCTL0, 0x0000000c);
3766 
3767 	if (path == RF_PATH_A) {
3768 		rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(24), 0x0);
3769 		rtw_write32(rtwdev, REG_DPD_CTL0_S0, 0x30000080);
3770 	} else if (path == RF_PATH_B) {
3771 		rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(24), 0x1);
3772 		rtw_write32(rtwdev, REG_DPD_CTL0_S1, 0x30000080);
3773 	}
3774 
3775 	rtw8822c_dpk_coef_tbl_apply(rtwdev, path);
3776 }
3777 
3778 static u8 rtw8822c_dpk_coef_read(struct rtw_dev *rtwdev, u8 path)
3779 {
3780 	struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3781 	u8 addr, result = 1;
3782 	u16 coef_i, coef_q;
3783 
3784 	for (addr = 0; addr < 20; addr++) {
3785 		coef_i = FIELD_GET(0x1fff0000, dpk_info->coef[path][addr]);
3786 		coef_q = FIELD_GET(0x1fff, dpk_info->coef[path][addr]);
3787 
3788 		if (rtw8822c_dpk_coef_iq_check(rtwdev, coef_i, coef_q)) {
3789 			result = 0;
3790 			break;
3791 		}
3792 	}
3793 	return result;
3794 }
3795 
3796 static void rtw8822c_dpk_coef_write(struct rtw_dev *rtwdev, u8 path, u8 result)
3797 {
3798 	struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3799 	u16 reg[DPK_RF_PATH_NUM] = {0x1b0c, 0x1b64};
3800 	u32 coef;
3801 	u8 addr;
3802 
3803 	rtw_write32(rtwdev, REG_NCTL0, 0x0000000c);
3804 	rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000000f0);
3805 
3806 	for (addr = 0; addr < 20; addr++) {
3807 		if (result == 0) {
3808 			if (addr == 3)
3809 				coef = 0x04001fff;
3810 			else
3811 				coef = 0x00001fff;
3812 		} else {
3813 			coef = dpk_info->coef[path][addr];
3814 		}
3815 		rtw_write32(rtwdev, reg[path] + addr * 4, coef);
3816 	}
3817 }
3818 
3819 static void rtw8822c_dpk_fill_result(struct rtw_dev *rtwdev, u32 dpk_txagc,
3820 				     u8 path, u8 result)
3821 {
3822 	struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3823 
3824 	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3825 
3826 	if (result)
3827 		rtw_write8(rtwdev, REG_DPD_AGC, (u8)(dpk_txagc - 6));
3828 	else
3829 		rtw_write8(rtwdev, REG_DPD_AGC, 0x00);
3830 
3831 	dpk_info->result[path] = result;
3832 	dpk_info->dpk_txagc[path] = rtw_read8(rtwdev, REG_DPD_AGC);
3833 
3834 	rtw8822c_dpk_coef_write(rtwdev, path, result);
3835 }
3836 
3837 static u32 rtw8822c_dpk_gainloss(struct rtw_dev *rtwdev, u8 path)
3838 {
3839 	struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3840 	u8 tx_agc, tx_bb, ori_txbb, ori_txagc, tx_agc_search, t1, t2;
3841 
3842 	ori_txbb = rtw8822c_dpk_rf_setting(rtwdev, path);
3843 	ori_txagc = (u8)rtw_read_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_TXAGC);
3844 
3845 	rtw8822c_dpk_rxbb_dc_cal(rtwdev, path);
3846 	rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DAGC);
3847 	rtw8822c_dpk_dgain_read(rtwdev, path);
3848 
3849 	if (rtw8822c_dpk_dc_corr_check(rtwdev, path)) {
3850 		rtw8822c_dpk_rxbb_dc_cal(rtwdev, path);
3851 		rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DAGC);
3852 		rtw8822c_dpk_dc_corr_check(rtwdev, path);
3853 	}
3854 
3855 	t1 = rtw8822c_dpk_thermal_read(rtwdev, path);
3856 	tx_bb = rtw8822c_dpk_pas_agc(rtwdev, path, false, true);
3857 	tx_agc_search = rtw8822c_dpk_gainloss_result(rtwdev, path);
3858 
3859 	if (tx_bb < tx_agc_search)
3860 		tx_bb = 0;
3861 	else
3862 		tx_bb = tx_bb - tx_agc_search;
3863 
3864 	rtw_write_rf(rtwdev, path, RF_TX_GAIN, BIT_GAIN_TXBB, tx_bb);
3865 
3866 	tx_agc = ori_txagc - (ori_txbb - tx_bb);
3867 
3868 	t2 = rtw8822c_dpk_thermal_read(rtwdev, path);
3869 
3870 	dpk_info->thermal_dpk_delta[path] = abs(t2 - t1);
3871 
3872 	return tx_agc;
3873 }
3874 
3875 static u8 rtw8822c_dpk_by_path(struct rtw_dev *rtwdev, u32 tx_agc, u8 path)
3876 {
3877 	u8 result;
3878 
3879 	result = rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DO_DPK);
3880 
3881 	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3882 
3883 	result = result | (u8)rtw_read32_mask(rtwdev, REG_DPD_CTL1_S0, BIT(26));
3884 
3885 	rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x33e14);
3886 
3887 	rtw8822c_dpk_get_coef(rtwdev, path);
3888 
3889 	return result;
3890 }
3891 
3892 static void rtw8822c_dpk_cal_gs(struct rtw_dev *rtwdev, u8 path)
3893 {
3894 	struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3895 	u32 tmp_gs = 0;
3896 
3897 	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3898 	rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_BYPASS_DPD, 0x0);
3899 	rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
3900 	rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x9);
3901 	rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_INNER_LB, 0x1);
3902 	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3903 	rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_DPD_CLK, 0xf);
3904 
3905 	if (path == RF_PATH_A) {
3906 		rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF,
3907 				 0x1066680);
3908 		rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, BIT_DPD_EN, 0x1);
3909 	} else {
3910 		rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF,
3911 				 0x1066680);
3912 		rtw_write32_mask(rtwdev, REG_DPD_CTL1_S1, BIT_DPD_EN, 0x1);
3913 	}
3914 
3915 	if (dpk_info->dpk_bw == DPK_CHANNEL_WIDTH_80) {
3916 		rtw_write32(rtwdev, REG_DPD_CTL16, 0x80001310);
3917 		rtw_write32(rtwdev, REG_DPD_CTL16, 0x00001310);
3918 		rtw_write32(rtwdev, REG_DPD_CTL16, 0x810000db);
3919 		rtw_write32(rtwdev, REG_DPD_CTL16, 0x010000db);
3920 		rtw_write32(rtwdev, REG_DPD_CTL16, 0x0000b428);
3921 		rtw_write32(rtwdev, REG_DPD_CTL15,
3922 			    0x05020000 | (BIT(path) << 28));
3923 	} else {
3924 		rtw_write32(rtwdev, REG_DPD_CTL16, 0x8200190c);
3925 		rtw_write32(rtwdev, REG_DPD_CTL16, 0x0200190c);
3926 		rtw_write32(rtwdev, REG_DPD_CTL16, 0x8301ee14);
3927 		rtw_write32(rtwdev, REG_DPD_CTL16, 0x0301ee14);
3928 		rtw_write32(rtwdev, REG_DPD_CTL16, 0x0000b428);
3929 		rtw_write32(rtwdev, REG_DPD_CTL15,
3930 			    0x05020008 | (BIT(path) << 28));
3931 	}
3932 
3933 	rtw_write32_mask(rtwdev, REG_DPD_CTL0, MASKBYTE3, 0x8 | path);
3934 
3935 	rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_CAL_PWR);
3936 
3937 	rtw_write32_mask(rtwdev, REG_DPD_CTL15, MASKBYTE3, 0x0);
3938 	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3939 	rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x0);
3940 	rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_INNER_LB, 0x0);
3941 	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3942 
3943 	if (path == RF_PATH_A)
3944 		rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF, 0x5b);
3945 	else
3946 		rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF, 0x5b);
3947 
3948 	rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_SEL, 0x0);
3949 
3950 	tmp_gs = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, BIT_RPT_DGAIN);
3951 	tmp_gs = (tmp_gs * 910) >> 10;
3952 	tmp_gs = DIV_ROUND_CLOSEST(tmp_gs, 10);
3953 
3954 	if (path == RF_PATH_A)
3955 		rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF, tmp_gs);
3956 	else
3957 		rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF, tmp_gs);
3958 
3959 	dpk_info->dpk_gs[path] = tmp_gs;
3960 }
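/* Worked example (illustrative): the gain-scale readback is normalized as
 * DIV_ROUND_CLOSEST((dgain * 910) >> 10, 10). A nominal dgain report of
 * 1024 gives (1024 * 910) >> 10 = 910 and then 91 = 0x5b, matching the
 * default dpk_gs value programmed in rtw8822c_dpk_result_reset().
 */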
3961 
3962 static void rtw8822c_dpk_cal_coef1(struct rtw_dev *rtwdev)
3963 {
3964 	struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3965 	u32 offset[DPK_RF_PATH_NUM] = {0, 0x58};
3966 	u32 i_scaling;
3967 	u8 path;
3968 
3969 	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x0000000c);
3970 	rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000000f0);
3971 	rtw_write32(rtwdev, REG_NCTL0, 0x00001148);
3972 	rtw_write32(rtwdev, REG_NCTL0, 0x00001149);
3973 
3974 	check_hw_ready(rtwdev, 0x2d9c, MASKBYTE0, 0x55);
3975 
3976 	rtw_write8(rtwdev, 0x1b10, 0x0);
3977 	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x0000000c);
3978 
3979 	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
3980 		i_scaling = 0x16c00 / dpk_info->dpk_gs[path];
3981 
3982 		rtw_write32_mask(rtwdev, 0x1b18 + offset[path], MASKHWORD,
3983 				 i_scaling);
3984 		rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0 + offset[path],
3985 				 GENMASK(31, 28), 0x9);
3986 		rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0 + offset[path],
3987 				 GENMASK(31, 28), 0x1);
3988 		rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0 + offset[path],
3989 				 GENMASK(31, 28), 0x0);
3990 		rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0 + offset[path],
3991 				 BIT(14), 0x0);
3992 	}
3993 }
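/* Worked example (illustrative): i_scaling = 0x16c00 / dpk_gs. With the
 * default gain scale of 0x5b (91) this is 93184 / 91 = 1024 = 0x400, so a
 * larger calibrated gain scale proportionally reduces the I scaling written
 * to 0x1b18/0x1b70 above.
 */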
3994 
3995 static void rtw8822c_dpk_on(struct rtw_dev *rtwdev, u8 path)
3996 {
3997 	struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3998 
3999 	rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DPK_ON);
4000 
4001 	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
4002 	rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
4003 
4004 	if (test_bit(path, dpk_info->dpk_path_ok))
4005 		rtw8822c_dpk_cal_gs(rtwdev, path);
4006 }
4007 
4008 static bool rtw8822c_dpk_check_pass(struct rtw_dev *rtwdev, bool is_fail,
4009 				    u32 dpk_txagc, u8 path)
4010 {
4011 	bool result;
4012 
4013 	if (!is_fail) {
4014 		if (rtw8822c_dpk_coef_read(rtwdev, path))
4015 			result = true;
4016 		else
4017 			result = false;
4018 	} else {
4019 		result = false;
4020 	}
4021 
4022 	rtw8822c_dpk_fill_result(rtwdev, dpk_txagc, path, result);
4023 
4024 	return result;
4025 }
4026 
4027 static void rtw8822c_dpk_result_reset(struct rtw_dev *rtwdev)
4028 {
4029 	struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4030 	u8 path;
4031 
4032 	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
4033 		clear_bit(path, dpk_info->dpk_path_ok);
4034 		rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
4035 				 0x8 | (path << 1));
4036 		rtw_write32_mask(rtwdev, 0x1b58, 0x0000007f, 0x0);
4037 
4038 		dpk_info->dpk_txagc[path] = 0;
4039 		dpk_info->result[path] = 0;
4040 		dpk_info->dpk_gs[path] = 0x5b;
4041 		dpk_info->pre_pwsf[path] = 0;
4042 		dpk_info->thermal_dpk[path] = rtw8822c_dpk_thermal_read(rtwdev,
4043 									path);
4044 	}
4045 }
4046 
4047 static void rtw8822c_dpk_calibrate(struct rtw_dev *rtwdev, u8 path)
4048 {
4049 	struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4050 	u32 dpk_txagc;
4051 	u8 dpk_fail;
4052 
4053 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] s%d dpk start\n", path);
4054 
4055 	dpk_txagc = rtw8822c_dpk_gainloss(rtwdev, path);
4056 
4057 	dpk_fail = rtw8822c_dpk_by_path(rtwdev, dpk_txagc, path);
4058 
4059 	if (!rtw8822c_dpk_check_pass(rtwdev, dpk_fail, dpk_txagc, path))
4060 		rtw_err(rtwdev, "failed to do dpk calibration\n");
4061 
4062 	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] s%d dpk finish\n", path);
4063 
4064 	if (dpk_info->result[path])
4065 		set_bit(path, dpk_info->dpk_path_ok);
4066 }
4067 
4068 static void rtw8822c_dpk_path_select(struct rtw_dev *rtwdev)
4069 {
4070 	rtw8822c_dpk_calibrate(rtwdev, RF_PATH_A);
4071 	rtw8822c_dpk_calibrate(rtwdev, RF_PATH_B);
4072 	rtw8822c_dpk_on(rtwdev, RF_PATH_A);
4073 	rtw8822c_dpk_on(rtwdev, RF_PATH_B);
4074 	rtw8822c_dpk_cal_coef1(rtwdev);
4075 }
4076 
4077 static void rtw8822c_dpk_enable_disable(struct rtw_dev *rtwdev)
4078 {
4079 	struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4080 	u32 mask = BIT(15) | BIT(14);
4081 
4082 	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
4083 
4084 	rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, BIT_DPD_EN,
4085 			 dpk_info->is_dpk_pwr_on);
4086 	rtw_write32_mask(rtwdev, REG_DPD_CTL1_S1, BIT_DPD_EN,
4087 			 dpk_info->is_dpk_pwr_on);
4088 
4089 	if (test_bit(RF_PATH_A, dpk_info->dpk_path_ok)) {
4090 		rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, mask, 0x0);
4091 		rtw_write8(rtwdev, REG_DPD_CTL0_S0, dpk_info->dpk_gs[RF_PATH_A]);
4092 	}
4093 	if (test_bit(RF_PATH_B, dpk_info->dpk_path_ok)) {
4094 		rtw_write32_mask(rtwdev, REG_DPD_CTL1_S1, mask, 0x0);
4095 		rtw_write8(rtwdev, REG_DPD_CTL0_S1, dpk_info->dpk_gs[RF_PATH_B]);
4096 	}
4097 }
4098 
4099 static void rtw8822c_dpk_reload_data(struct rtw_dev *rtwdev)
4100 {
4101 	struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4102 	u8 path;
4103 
4104 	if (!test_bit(RF_PATH_A, dpk_info->dpk_path_ok) &&
4105 	    !test_bit(RF_PATH_B, dpk_info->dpk_path_ok) &&
4106 	    dpk_info->dpk_ch == 0)
4107 		return;
4108 
4109 	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
4110 		rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
4111 				 0x8 | (path << 1));
4112 		if (dpk_info->dpk_band == RTW_BAND_2G)
4113 			rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f100000);
4114 		else
4115 			rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f0d0000);
4116 
4117 		rtw_write8(rtwdev, REG_DPD_AGC, dpk_info->dpk_txagc[path]);
4118 
4119 		rtw8822c_dpk_coef_write(rtwdev, path,
4120 					test_bit(path, dpk_info->dpk_path_ok));
4121 
4122 		rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DPK_ON);
4123 
4124 		rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
4125 
4126 		if (path == RF_PATH_A)
4127 			rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF,
4128 					 dpk_info->dpk_gs[path]);
4129 		else
4130 			rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF,
4131 					 dpk_info->dpk_gs[path]);
4132 	}
4133 	rtw8822c_dpk_cal_coef1(rtwdev);
4134 }
4135 
4136 static bool rtw8822c_dpk_reload(struct rtw_dev *rtwdev)
4137 {
4138 	struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4139 	u8 channel;
4140 
4141 	dpk_info->is_reload = false;
4142 
4143 	channel = (u8)(rtw_read_rf(rtwdev, RF_PATH_A, 0x18, RFREG_MASK) & 0xff);
4144 
4145 	if (channel == dpk_info->dpk_ch) {
4146 		rtw_dbg(rtwdev, RTW_DBG_RFK,
4147 			"[DPK] DPK reload for CH%d!!\n", dpk_info->dpk_ch);
4148 		rtw8822c_dpk_reload_data(rtwdev);
4149 		dpk_info->is_reload = true;
4150 	}
4151 
4152 	return dpk_info->is_reload;
4153 }
4154 
4155 static void rtw8822c_do_dpk(struct rtw_dev *rtwdev)
4156 {
4157 	struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4158 	struct rtw_backup_info bckp[DPK_BB_REG_NUM];
4159 	u32 rf_reg_backup[DPK_RF_REG_NUM][DPK_RF_PATH_NUM];
4160 	u32 bb_reg[DPK_BB_REG_NUM] = {
4161 		0x520, 0x820, 0x824, 0x1c3c, 0x1d58, 0x1864,
4162 		0x4164, 0x180c, 0x410c, 0x186c, 0x416c,
4163 		0x1a14, 0x1e70, 0x80c, 0x1d70, 0x1e7c, 0x18a4, 0x41a4};
4164 	u32 rf_reg[DPK_RF_REG_NUM] = {
4165 		0x0, 0x1a, 0x55, 0x63, 0x87, 0x8f, 0xde};
4166 	u8 path;
4167 
4168 	if (!dpk_info->is_dpk_pwr_on) {
4169 		rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] Skip DPK due to DPD PWR off\n");
4170 		return;
4171 	} else if (rtw8822c_dpk_reload(rtwdev)) {
4172 		return;
4173 	}
4174 
4175 	for (path = RF_PATH_A; path < DPK_RF_PATH_NUM; path++)
4176 		ewma_thermal_init(&dpk_info->avg_thermal[path]);
4177 
4178 	rtw8822c_dpk_information(rtwdev);
4179 
4180 	rtw8822c_dpk_backup_registers(rtwdev, bb_reg, DPK_BB_REG_NUM, bckp);
4181 	rtw8822c_dpk_backup_rf_registers(rtwdev, rf_reg, rf_reg_backup);
4182 
4183 	rtw8822c_dpk_mac_bb_setting(rtwdev);
4184 	rtw8822c_dpk_afe_setting(rtwdev, true);
4185 	rtw8822c_dpk_pre_setting(rtwdev);
4186 	rtw8822c_dpk_result_reset(rtwdev);
4187 	rtw8822c_dpk_path_select(rtwdev);
4188 	rtw8822c_dpk_afe_setting(rtwdev, false);
4189 	rtw8822c_dpk_enable_disable(rtwdev);
4190 
4191 	rtw8822c_dpk_reload_rf_registers(rtwdev, rf_reg, rf_reg_backup);
4192 	for (path = 0; path < rtwdev->hal.rf_path_num; path++)
4193 		rtw8822c_dpk_rxbb_dc_cal(rtwdev, path);
4194 	rtw8822c_dpk_restore_registers(rtwdev, DPK_BB_REG_NUM, bckp);
4195 }
4196 
4197 static void rtw8822c_phy_calibration(struct rtw_dev *rtwdev)
4198 {
4199 	rtw8822c_rfk_power_save(rtwdev, false);
4200 	rtw8822c_do_gapk(rtwdev);
4201 	rtw8822c_do_iqk(rtwdev);
4202 	rtw8822c_do_dpk(rtwdev);
4203 	rtw8822c_rfk_power_save(rtwdev, true);
4204 }
4205 
4206 static void rtw8822c_dpk_track(struct rtw_dev *rtwdev)
4207 {
4208 	struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4209 	u8 path;
4210 	u8 thermal_value[DPK_RF_PATH_NUM] = {0};
4211 	s8 offset[DPK_RF_PATH_NUM], delta_dpk[DPK_RF_PATH_NUM];
4212 
4213 	if (dpk_info->thermal_dpk[0] == 0 && dpk_info->thermal_dpk[1] == 0)
4214 		return;
4215 
4216 	for (path = 0; path < DPK_RF_PATH_NUM; path++) {
4217 		thermal_value[path] = rtw8822c_dpk_thermal_read(rtwdev, path);
4218 		ewma_thermal_add(&dpk_info->avg_thermal[path],
4219 				 thermal_value[path]);
4220 		thermal_value[path] =
4221 			ewma_thermal_read(&dpk_info->avg_thermal[path]);
4222 		delta_dpk[path] = dpk_info->thermal_dpk[path] -
4223 				  thermal_value[path];
4224 		offset[path] = delta_dpk[path] -
4225 			       dpk_info->thermal_dpk_delta[path];
4226 		offset[path] &= 0x7f;
4227 
4228 		if (offset[path] != dpk_info->pre_pwsf[path]) {
4229 			rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
4230 					 0x8 | (path << 1));
4231 			rtw_write32_mask(rtwdev, 0x1b58, GENMASK(6, 0),
4232 					 offset[path]);
4233 			dpk_info->pre_pwsf[path] = offset[path];
4234 		}
4235 	}
4236 }
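/* Worked example (illustrative): the per-path power-scaling offset is the
 * thermal drift since DPK minus the drift seen during DPK itself, kept as a
 * 7-bit value. With delta_dpk = -3 and thermal_dpk_delta = 2 the result is
 * (-5) & 0x7f = 0x7b, written into the GENMASK(6, 0) pwsf field of 0x1b58
 * only when it differs from the previously programmed value.
 */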
4237 
4238 #define XCAP_EXTEND(val) ({typeof(val) _v = (val); _v | _v << 7; })
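/* Illustration (editorial note): XCAP_EXTEND() duplicates the 7-bit crystal
 * cap code into two adjacent 7-bit fields, e.g. 0x20 becomes
 * 0x20 | (0x20 << 7) = 0x1020, so a single rtw_write32_mask() through
 * BIT_XCAP_0 presumably programs both crystal-cap legs of REG_ANAPAR_XTAL_0
 * with the same code.
 */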
4239 static void rtw8822c_set_crystal_cap_reg(struct rtw_dev *rtwdev, u8 crystal_cap)
4240 {
4241 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4242 	struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4243 	u32 val = 0;
4244 
4245 	val = XCAP_EXTEND(crystal_cap);
4246 	cfo->crystal_cap = crystal_cap;
4247 	rtw_write32_mask(rtwdev, REG_ANAPAR_XTAL_0, BIT_XCAP_0, val);
4248 }
4249 
4250 static void rtw8822c_set_crystal_cap(struct rtw_dev *rtwdev, u8 crystal_cap)
4251 {
4252 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4253 	struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4254 
4255 	if (cfo->crystal_cap == crystal_cap)
4256 		return;
4257 
4258 	rtw8822c_set_crystal_cap_reg(rtwdev, crystal_cap);
4259 }
4260 
4261 static void rtw8822c_cfo_tracking_reset(struct rtw_dev *rtwdev)
4262 {
4263 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4264 	struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4265 
4266 	cfo->is_adjust = true;
4267 
4268 	if (cfo->crystal_cap > rtwdev->efuse.crystal_cap)
4269 		rtw8822c_set_crystal_cap(rtwdev, cfo->crystal_cap - 1);
4270 	else if (cfo->crystal_cap < rtwdev->efuse.crystal_cap)
4271 		rtw8822c_set_crystal_cap(rtwdev, cfo->crystal_cap + 1);
4272 }
4273 
4274 static void rtw8822c_cfo_init(struct rtw_dev *rtwdev)
4275 {
4276 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4277 	struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4278 
4279 	cfo->crystal_cap = rtwdev->efuse.crystal_cap;
4280 	cfo->is_adjust = true;
4281 }
4282 
4283 #define REPORT_TO_KHZ(val) ({typeof(val) _v = (val); (_v << 1) + (_v >> 1); })
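/* Illustration (editorial note): REPORT_TO_KHZ() computes 2 * val + val / 2,
 * i.e. scales the accumulated CFO report by roughly 2.5 (100 -> 250) before
 * the per-path average below divides by the packet count.
 */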
4284 static s32 rtw8822c_cfo_calc_avg(struct rtw_dev *rtwdev, u8 path_num)
4285 {
4286 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4287 	struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4288 	s32 cfo_avg, cfo_path_sum = 0, cfo_rpt_sum;
4289 	u8 i;
4290 
4291 	for (i = 0; i < path_num; i++) {
4292 		cfo_rpt_sum = REPORT_TO_KHZ(cfo->cfo_tail[i]);
4293 
4294 		if (cfo->cfo_cnt[i])
4295 			cfo_avg = cfo_rpt_sum / cfo->cfo_cnt[i];
4296 		else
4297 			cfo_avg = 0;
4298 
4299 		cfo_path_sum += cfo_avg;
4300 	}
4301 
4302 	for (i = 0; i < path_num; i++) {
4303 		cfo->cfo_tail[i] = 0;
4304 		cfo->cfo_cnt[i] = 0;
4305 	}
4306 
4307 	return cfo_path_sum / path_num;
4308 }
4309 
4310 static void rtw8822c_cfo_need_adjust(struct rtw_dev *rtwdev, s32 cfo_avg)
4311 {
4312 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4313 	struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4314 
4315 	if (!cfo->is_adjust) {
4316 		if (abs(cfo_avg) > CFO_TRK_ENABLE_TH)
4317 			cfo->is_adjust = true;
4318 	} else {
4319 		if (abs(cfo_avg) <= CFO_TRK_STOP_TH)
4320 			cfo->is_adjust = false;
4321 	}
4322 
4323 	if (!rtw_coex_disabled(rtwdev)) {
4324 		cfo->is_adjust = false;
4325 		rtw8822c_set_crystal_cap(rtwdev, rtwdev->efuse.crystal_cap);
4326 	}
4327 }
4328 
4329 static void rtw8822c_cfo_track(struct rtw_dev *rtwdev)
4330 {
4331 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4332 	struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4333 	u8 path_num = rtwdev->hal.rf_path_num;
4334 	s8 crystal_cap = cfo->crystal_cap;
4335 	s32 cfo_avg = 0;
4336 
4337 	if (rtwdev->sta_cnt != 1) {
4338 		rtw8822c_cfo_tracking_reset(rtwdev);
4339 		return;
4340 	}
4341 
4342 	if (cfo->packet_count == cfo->packet_count_pre)
4343 		return;
4344 
4345 	cfo->packet_count_pre = cfo->packet_count;
4346 	cfo_avg = rtw8822c_cfo_calc_avg(rtwdev, path_num);
4347 	rtw8822c_cfo_need_adjust(rtwdev, cfo_avg);
4348 
4349 	if (cfo->is_adjust) {
4350 		if (cfo_avg > CFO_TRK_ADJ_TH)
4351 			crystal_cap++;
4352 		else if (cfo_avg < -CFO_TRK_ADJ_TH)
4353 			crystal_cap--;
4354 
4355 		crystal_cap = clamp_t(s8, crystal_cap, 0, XCAP_MASK);
4356 		rtw8822c_set_crystal_cap(rtwdev, (u8)crystal_cap);
4357 	}
4358 }
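/* Note (editorial): CFO tracking only runs while exactly one station is
 * associated; in any other case rtw8822c_cfo_tracking_reset() walks the
 * crystal cap back toward the efuse default one code per call. When tracking
 * is active, an average CFO beyond +/-CFO_TRK_ADJ_TH nudges the cap by one
 * code, clamped to the 0..XCAP_MASK range.
 */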
4359 
4360 static const struct rtw_phy_cck_pd_reg
4361 rtw8822c_cck_pd_reg[RTW_CHANNEL_WIDTH_40 + 1][RTW_RF_PATH_MAX] = {
4362 	{
4363 		{0x1ac8, 0x00ff, 0x1ad0, 0x01f},
4364 		{0x1ac8, 0xff00, 0x1ad0, 0x3e0}
4365 	},
4366 	{
4367 		{0x1acc, 0x00ff, 0x1ad0, 0x01F00000},
4368 		{0x1acc, 0xff00, 0x1ad0, 0x3E000000}
4369 	},
4370 };
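/* Each table entry is {reg_pd, mask_pd, reg_cs, mask_cs}: the register/mask
 * pair holding the CCK power-detection threshold and the carrier-sense
 * ratio, indexed by bandwidth (20 MHz row first, assuming
 * RTW_CHANNEL_WIDTH_20 == 0) and by RX path. rtw8822c_phy_cck_pd_set_reg()
 * below reads, offsets and writes back these fields.
 */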
4371 
4372 #define RTW_CCK_PD_MAX 255
4373 #define RTW_CCK_CS_MAX 31
4374 #define RTW_CCK_CS_ERR1 27
4375 #define RTW_CCK_CS_ERR2 29
4376 static void
4377 rtw8822c_phy_cck_pd_set_reg(struct rtw_dev *rtwdev,
4378 			    s8 pd_diff, s8 cs_diff, u8 bw, u8 nrx)
4379 {
4380 	u32 pd, cs;
4381 
4382 	if (WARN_ON(bw > RTW_CHANNEL_WIDTH_40 || nrx >= RTW_RF_PATH_MAX))
4383 		return;
4384 
4385 	pd = rtw_read32_mask(rtwdev,
4386 			     rtw8822c_cck_pd_reg[bw][nrx].reg_pd,
4387 			     rtw8822c_cck_pd_reg[bw][nrx].mask_pd);
4388 	cs = rtw_read32_mask(rtwdev,
4389 			     rtw8822c_cck_pd_reg[bw][nrx].reg_cs,
4390 			     rtw8822c_cck_pd_reg[bw][nrx].mask_cs);
4391 	pd += pd_diff;
4392 	cs += cs_diff;
4393 	if (pd > RTW_CCK_PD_MAX)
4394 		pd = RTW_CCK_PD_MAX;
4395 	if (cs == RTW_CCK_CS_ERR1 || cs == RTW_CCK_CS_ERR2)
4396 		cs++;
4397 	else if (cs > RTW_CCK_CS_MAX)
4398 		cs = RTW_CCK_CS_MAX;
4399 	rtw_write32_mask(rtwdev,
4400 			 rtw8822c_cck_pd_reg[bw][nrx].reg_pd,
4401 			 rtw8822c_cck_pd_reg[bw][nrx].mask_pd,
4402 			 pd);
4403 	rtw_write32_mask(rtwdev,
4404 			 rtw8822c_cck_pd_reg[bw][nrx].reg_cs,
4405 			 rtw8822c_cck_pd_reg[bw][nrx].mask_cs,
4406 			 cs);
4407 
4408 	rtw_dbg(rtwdev, RTW_DBG_PHY,
4409 		"is_linked=%d, bw=%d, nrx=%d, cs_ratio=0x%x, pd_th=0x%x\n",
4410 		rtw_is_assoc(rtwdev), bw, nrx, cs, pd);
4411 }
4412 
4413 static void rtw8822c_phy_cck_pd_set(struct rtw_dev *rtwdev, u8 new_lvl)
4414 {
4415 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4416 	s8 pd_lvl[CCK_PD_LV_MAX] = {0, 2, 4, 6, 8};
4417 	s8 cs_lvl[CCK_PD_LV_MAX] = {0, 2, 2, 2, 4};
4418 	u8 cur_lvl;
4419 	u8 nrx, bw;
4420 
4421 	nrx = (u8)rtw_read32_mask(rtwdev, 0x1a2c, 0x60000);
4422 	bw = (u8)rtw_read32_mask(rtwdev, 0x9b0, 0xc);
4423 
4424 	rtw_dbg(rtwdev, RTW_DBG_PHY, "lv: (%d) -> (%d) bw=%d nr=%d cck_fa_avg=%d\n",
4425 		dm_info->cck_pd_lv[bw][nrx], new_lvl, bw, nrx,
4426 		dm_info->cck_fa_avg);
4427 
4428 	if (dm_info->cck_pd_lv[bw][nrx] == new_lvl)
4429 		return;
4430 
4431 	cur_lvl = dm_info->cck_pd_lv[bw][nrx];
4432 
4433 	/* update cck pd info */
4434 	dm_info->cck_fa_avg = CCK_FA_AVG_RESET;
4435 
4436 	rtw8822c_phy_cck_pd_set_reg(rtwdev,
4437 				    pd_lvl[new_lvl] - pd_lvl[cur_lvl],
4438 				    cs_lvl[new_lvl] - cs_lvl[cur_lvl],
4439 				    bw, nrx);
4440 	dm_info->cck_pd_lv[bw][nrx] = new_lvl;
4441 }
4442 
4443 #define PWR_TRACK_MASK 0x7f
4444 static void rtw8822c_pwrtrack_set(struct rtw_dev *rtwdev, u8 rf_path)
4445 {
4446 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4447 
4448 	switch (rf_path) {
4449 	case RF_PATH_A:
4450 		rtw_write32_mask(rtwdev, 0x18a0, PWR_TRACK_MASK,
4451 				 dm_info->delta_power_index[rf_path]);
4452 		break;
4453 	case RF_PATH_B:
4454 		rtw_write32_mask(rtwdev, 0x41a0, PWR_TRACK_MASK,
4455 				 dm_info->delta_power_index[rf_path]);
4456 		break;
4457 	default:
4458 		break;
4459 	}
4460 }
4461 
4462 static void rtw8822c_pwr_track_stats(struct rtw_dev *rtwdev, u8 path)
4463 {
4464 	u8 thermal_value;
4465 
4466 	if (rtwdev->efuse.thermal_meter[path] == 0xff)
4467 		return;
4468 
4469 	thermal_value = rtw_read_rf(rtwdev, path, RF_T_METER, 0x7e);
4470 	rtw_phy_pwrtrack_avg(rtwdev, thermal_value, path);
4471 }
4472 
4473 static void rtw8822c_pwr_track_path(struct rtw_dev *rtwdev,
4474 				    struct rtw_swing_table *swing_table,
4475 				    u8 path)
4476 {
4477 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4478 	u8 delta;
4479 
4480 	delta = rtw_phy_pwrtrack_get_delta(rtwdev, path);
4481 	dm_info->delta_power_index[path] =
4482 		rtw_phy_pwrtrack_get_pwridx(rtwdev, swing_table, path, path,
4483 					    delta);
4484 	rtw8822c_pwrtrack_set(rtwdev, path);
4485 }
4486 
4487 static void __rtw8822c_pwr_track(struct rtw_dev *rtwdev)
4488 {
4489 	struct rtw_swing_table swing_table;
4490 	u8 i;
4491 
4492 	rtw_phy_config_swing_table(rtwdev, &swing_table);
4493 
4494 	for (i = 0; i < rtwdev->hal.rf_path_num; i++)
4495 		rtw8822c_pwr_track_stats(rtwdev, i);
4496 	if (rtw_phy_pwrtrack_need_lck(rtwdev))
4497 		rtw8822c_do_lck(rtwdev);
4498 	for (i = 0; i < rtwdev->hal.rf_path_num; i++)
4499 		rtw8822c_pwr_track_path(rtwdev, &swing_table, i);
4500 }
4501 
4502 static void rtw8822c_pwr_track(struct rtw_dev *rtwdev)
4503 {
4504 	struct rtw_efuse *efuse = &rtwdev->efuse;
4505 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4506 
4507 	if (efuse->power_track_type != 0)
4508 		return;
4509 
4510 	if (!dm_info->pwr_trk_triggered) {
4511 		rtw_write_rf(rtwdev, RF_PATH_A, RF_T_METER, BIT(19), 0x01);
4512 		rtw_write_rf(rtwdev, RF_PATH_A, RF_T_METER, BIT(19), 0x00);
4513 		rtw_write_rf(rtwdev, RF_PATH_A, RF_T_METER, BIT(19), 0x01);
4514 
4515 		rtw_write_rf(rtwdev, RF_PATH_B, RF_T_METER, BIT(19), 0x01);
4516 		rtw_write_rf(rtwdev, RF_PATH_B, RF_T_METER, BIT(19), 0x00);
4517 		rtw_write_rf(rtwdev, RF_PATH_B, RF_T_METER, BIT(19), 0x01);
4518 
4519 		dm_info->pwr_trk_triggered = true;
4520 		return;
4521 	}
4522 
4523 	__rtw8822c_pwr_track(rtwdev);
4524 	dm_info->pwr_trk_triggered = false;
4525 }
4526 
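/*
 * Descriptive note (editor addition): one-time EDCCA setup for adaptivity;
 * park both thresholds at RTW8822C_EDCCA_MAX, clear the EDCCA-disable and
 * decision-option bits and enable the EDCCA mask countdown on the MAC side.
 */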
4527 static void rtw8822c_adaptivity_init(struct rtw_dev *rtwdev)
4528 {
4529 	rtw_phy_set_edcca_th(rtwdev, RTW8822C_EDCCA_MAX, RTW8822C_EDCCA_MAX);
4530 
4531 	/* mac edcca state setting */
4532 	rtw_write32_clr(rtwdev, REG_TX_PTCL_CTRL, BIT_DIS_EDCCA);
4533 	rtw_write32_set(rtwdev, REG_RD_CTRL, BIT_EDCCA_MSK_CNTDOWN_EN);
4534 
4535 	/* edcca decision opt */
4536 	rtw_write32_clr(rtwdev, REG_EDCCA_DECISION, BIT_EDCCA_OPTION);
4537 }
4538 
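/*
 * Descriptive note (editor addition): recompute the EDCCA L2H/H2L
 * thresholds from the most recent IGI value.  In normal mode the L2H
 * threshold follows the IGI with a fixed diff (floored at EDCCA_TH_L2H_LB);
 * in adaptivity mode it is capped at the configured l2h_th_ini, backing off
 * towards the IGI when the IGI is low.
 */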
4539 static void rtw8822c_adaptivity(struct rtw_dev *rtwdev)
4540 {
4541 	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4542 	s8 l2h, h2l;
4543 	u8 igi;
4544 
4545 	igi = dm_info->igi_history[0];
4546 	if (dm_info->edcca_mode == RTW_EDCCA_NORMAL) {
4547 		l2h = max_t(s8, igi + EDCCA_IGI_L2H_DIFF, EDCCA_TH_L2H_LB);
4548 		h2l = l2h - EDCCA_L2H_H2L_DIFF_NORMAL;
4549 	} else {
4550 		if (igi < dm_info->l2h_th_ini - EDCCA_ADC_BACKOFF)
4551 			l2h = igi + EDCCA_ADC_BACKOFF;
4552 		else
4553 			l2h = dm_info->l2h_th_ini;
4554 		h2l = l2h - EDCCA_L2H_H2L_DIFF;
4555 	}
4556 
4557 	rtw_phy_set_edcca_th(rtwdev, l2h, h2l);
4558 }
4559 
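/*
 * Descriptive note (editor addition): power on/off sequences handed to the
 * rtw88 power-sequence parser.  Field order follows struct rtw_pwr_seq_cmd:
 * {register offset, chip-cut mask, interface mask, address space, command,
 * bit mask, value}.  RTW_PWR_CMD_POLLING entries wait for the masked bits
 * to reach the value, and the 0xFFFF/RTW_PWR_CMD_END entry terminates a
 * table.
 */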
4560 static const struct rtw_pwr_seq_cmd trans_carddis_to_cardemu_8822c[] = {
4561 	{0x0086,
4562 	 RTW_PWR_CUT_ALL_MSK,
4563 	 RTW_PWR_INTF_SDIO_MSK,
4564 	 RTW_PWR_ADDR_SDIO,
4565 	 RTW_PWR_CMD_WRITE, BIT(0), 0},
4566 	{0x0086,
4567 	 RTW_PWR_CUT_ALL_MSK,
4568 	 RTW_PWR_INTF_SDIO_MSK,
4569 	 RTW_PWR_ADDR_SDIO,
4570 	 RTW_PWR_CMD_POLLING, BIT(1), BIT(1)},
4571 	{0x002E,
4572 	 RTW_PWR_CUT_ALL_MSK,
4573 	 RTW_PWR_INTF_ALL_MSK,
4574 	 RTW_PWR_ADDR_MAC,
4575 	 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4576 	{0x002D,
4577 	 RTW_PWR_CUT_ALL_MSK,
4578 	 RTW_PWR_INTF_ALL_MSK,
4579 	 RTW_PWR_ADDR_MAC,
4580 	 RTW_PWR_CMD_WRITE, BIT(0), 0},
4581 	{0x007F,
4582 	 RTW_PWR_CUT_ALL_MSK,
4583 	 RTW_PWR_INTF_ALL_MSK,
4584 	 RTW_PWR_ADDR_MAC,
4585 	 RTW_PWR_CMD_WRITE, BIT(7), 0},
4586 	{0x004A,
4587 	 RTW_PWR_CUT_ALL_MSK,
4588 	 RTW_PWR_INTF_USB_MSK,
4589 	 RTW_PWR_ADDR_MAC,
4590 	 RTW_PWR_CMD_WRITE, BIT(0), 0},
4591 	{0x0005,
4592 	 RTW_PWR_CUT_ALL_MSK,
4593 	 RTW_PWR_INTF_ALL_MSK,
4594 	 RTW_PWR_ADDR_MAC,
4595 	 RTW_PWR_CMD_WRITE, BIT(3) | BIT(4) | BIT(7), 0},
4596 	{0xFFFF,
4597 	 RTW_PWR_CUT_ALL_MSK,
4598 	 RTW_PWR_INTF_ALL_MSK,
4599 	 0,
4600 	 RTW_PWR_CMD_END, 0, 0},
4601 };
4602 
4603 static const struct rtw_pwr_seq_cmd trans_cardemu_to_act_8822c[] = {
4604 	{0x0000,
4605 	 RTW_PWR_CUT_ALL_MSK,
4606 	 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4607 	 RTW_PWR_ADDR_MAC,
4608 	 RTW_PWR_CMD_WRITE, BIT(5), 0},
4609 	{0x0005,
4610 	 RTW_PWR_CUT_ALL_MSK,
4611 	 RTW_PWR_INTF_ALL_MSK,
4612 	 RTW_PWR_ADDR_MAC,
4613 	 RTW_PWR_CMD_WRITE, (BIT(4) | BIT(3) | BIT(2)), 0},
4614 	{0x0075,
4615 	 RTW_PWR_CUT_ALL_MSK,
4616 	 RTW_PWR_INTF_PCI_MSK,
4617 	 RTW_PWR_ADDR_MAC,
4618 	 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4619 	{0x0006,
4620 	 RTW_PWR_CUT_ALL_MSK,
4621 	 RTW_PWR_INTF_ALL_MSK,
4622 	 RTW_PWR_ADDR_MAC,
4623 	 RTW_PWR_CMD_POLLING, BIT(1), BIT(1)},
4624 	{0x0075,
4625 	 RTW_PWR_CUT_ALL_MSK,
4626 	 RTW_PWR_INTF_PCI_MSK,
4627 	 RTW_PWR_ADDR_MAC,
4628 	 RTW_PWR_CMD_WRITE, BIT(0), 0},
4629 	{0xFF1A,
4630 	 RTW_PWR_CUT_ALL_MSK,
4631 	 RTW_PWR_INTF_USB_MSK,
4632 	 RTW_PWR_ADDR_MAC,
4633 	 RTW_PWR_CMD_WRITE, 0xFF, 0},
4634 	{0x002E,
4635 	 RTW_PWR_CUT_ALL_MSK,
4636 	 RTW_PWR_INTF_ALL_MSK,
4637 	 RTW_PWR_ADDR_MAC,
4638 	 RTW_PWR_CMD_WRITE, BIT(3), 0},
4639 	{0x0006,
4640 	 RTW_PWR_CUT_ALL_MSK,
4641 	 RTW_PWR_INTF_ALL_MSK,
4642 	 RTW_PWR_ADDR_MAC,
4643 	 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4644 	{0x0005,
4645 	 RTW_PWR_CUT_ALL_MSK,
4646 	 RTW_PWR_INTF_ALL_MSK,
4647 	 RTW_PWR_ADDR_MAC,
4648 	 RTW_PWR_CMD_WRITE, (BIT(4) | BIT(3)), 0},
4649 	{0x1018,
4650 	 RTW_PWR_CUT_ALL_MSK,
4651 	 RTW_PWR_INTF_ALL_MSK,
4652 	 RTW_PWR_ADDR_MAC,
4653 	 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4654 	{0x0005,
4655 	 RTW_PWR_CUT_ALL_MSK,
4656 	 RTW_PWR_INTF_ALL_MSK,
4657 	 RTW_PWR_ADDR_MAC,
4658 	 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4659 	{0x0005,
4660 	 RTW_PWR_CUT_ALL_MSK,
4661 	 RTW_PWR_INTF_ALL_MSK,
4662 	 RTW_PWR_ADDR_MAC,
4663 	 RTW_PWR_CMD_POLLING, BIT(0), 0},
4664 	{0x0074,
4665 	 RTW_PWR_CUT_ALL_MSK,
4666 	 RTW_PWR_INTF_PCI_MSK,
4667 	 RTW_PWR_ADDR_MAC,
4668 	 RTW_PWR_CMD_WRITE, BIT(5), BIT(5)},
4669 	{0x0071,
4670 	 RTW_PWR_CUT_ALL_MSK,
4671 	 RTW_PWR_INTF_PCI_MSK,
4672 	 RTW_PWR_ADDR_MAC,
4673 	 RTW_PWR_CMD_WRITE, BIT(4), 0},
4674 	{0x0062,
4675 	 RTW_PWR_CUT_ALL_MSK,
4676 	 RTW_PWR_INTF_PCI_MSK,
4677 	 RTW_PWR_ADDR_MAC,
4678 	 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6) | BIT(5)),
4679 	 (BIT(7) | BIT(6) | BIT(5))},
4680 	{0x0061,
4681 	 RTW_PWR_CUT_ALL_MSK,
4682 	 RTW_PWR_INTF_PCI_MSK,
4683 	 RTW_PWR_ADDR_MAC,
4684 	 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6) | BIT(5)), 0},
4685 	{0x001F,
4686 	 RTW_PWR_CUT_ALL_MSK,
4687 	 RTW_PWR_INTF_ALL_MSK,
4688 	 RTW_PWR_ADDR_MAC,
4689 	 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6)), BIT(7)},
4690 	{0x00EF,
4691 	 RTW_PWR_CUT_ALL_MSK,
4692 	 RTW_PWR_INTF_ALL_MSK,
4693 	 RTW_PWR_ADDR_MAC,
4694 	 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6)), BIT(7)},
4695 	{0x1045,
4696 	 RTW_PWR_CUT_ALL_MSK,
4697 	 RTW_PWR_INTF_ALL_MSK,
4698 	 RTW_PWR_ADDR_MAC,
4699 	 RTW_PWR_CMD_WRITE, BIT(4), BIT(4)},
4700 	{0x0010,
4701 	 RTW_PWR_CUT_ALL_MSK,
4702 	 RTW_PWR_INTF_ALL_MSK,
4703 	 RTW_PWR_ADDR_MAC,
4704 	 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4705 	{0x1064,
4706 	 RTW_PWR_CUT_ALL_MSK,
4707 	 RTW_PWR_INTF_ALL_MSK,
4708 	 RTW_PWR_ADDR_MAC,
4709 	 RTW_PWR_CMD_WRITE, BIT(1), BIT(1)},
4710 	{0xFFFF,
4711 	 RTW_PWR_CUT_ALL_MSK,
4712 	 RTW_PWR_INTF_ALL_MSK,
4713 	 0,
4714 	 RTW_PWR_CMD_END, 0, 0},
4715 };
4716 
4717 static const struct rtw_pwr_seq_cmd trans_act_to_cardemu_8822c[] = {
4718 	{0x0093,
4719 	 RTW_PWR_CUT_ALL_MSK,
4720 	 RTW_PWR_INTF_ALL_MSK,
4721 	 RTW_PWR_ADDR_MAC,
4722 	 RTW_PWR_CMD_WRITE, BIT(3), 0},
4723 	{0x001F,
4724 	 RTW_PWR_CUT_ALL_MSK,
4725 	 RTW_PWR_INTF_ALL_MSK,
4726 	 RTW_PWR_ADDR_MAC,
4727 	 RTW_PWR_CMD_WRITE, 0xFF, 0},
4728 	{0x00EF,
4729 	 RTW_PWR_CUT_ALL_MSK,
4730 	 RTW_PWR_INTF_ALL_MSK,
4731 	 RTW_PWR_ADDR_MAC,
4732 	 RTW_PWR_CMD_WRITE, 0xFF, 0},
4733 	{0x1045,
4734 	 RTW_PWR_CUT_ALL_MSK,
4735 	 RTW_PWR_INTF_ALL_MSK,
4736 	 RTW_PWR_ADDR_MAC,
4737 	 RTW_PWR_CMD_WRITE, BIT(4), 0},
4738 	{0xFF1A,
4739 	 RTW_PWR_CUT_ALL_MSK,
4740 	 RTW_PWR_INTF_USB_MSK,
4741 	 RTW_PWR_ADDR_MAC,
4742 	 RTW_PWR_CMD_WRITE, 0xFF, 0x30},
4743 	{0x0049,
4744 	 RTW_PWR_CUT_ALL_MSK,
4745 	 RTW_PWR_INTF_ALL_MSK,
4746 	 RTW_PWR_ADDR_MAC,
4747 	 RTW_PWR_CMD_WRITE, BIT(1), 0},
4748 	{0x0006,
4749 	 RTW_PWR_CUT_ALL_MSK,
4750 	 RTW_PWR_INTF_ALL_MSK,
4751 	 RTW_PWR_ADDR_MAC,
4752 	 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4753 	{0x0002,
4754 	 RTW_PWR_CUT_ALL_MSK,
4755 	 RTW_PWR_INTF_ALL_MSK,
4756 	 RTW_PWR_ADDR_MAC,
4757 	 RTW_PWR_CMD_WRITE, BIT(1), 0},
4758 	{0x0005,
4759 	 RTW_PWR_CUT_ALL_MSK,
4760 	 RTW_PWR_INTF_ALL_MSK,
4761 	 RTW_PWR_ADDR_MAC,
4762 	 RTW_PWR_CMD_WRITE, BIT(1), BIT(1)},
4763 	{0x0005,
4764 	 RTW_PWR_CUT_ALL_MSK,
4765 	 RTW_PWR_INTF_ALL_MSK,
4766 	 RTW_PWR_ADDR_MAC,
4767 	 RTW_PWR_CMD_POLLING, BIT(1), 0},
4768 	{0x0000,
4769 	 RTW_PWR_CUT_ALL_MSK,
4770 	 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4771 	 RTW_PWR_ADDR_MAC,
4772 	 RTW_PWR_CMD_WRITE, BIT(5), BIT(5)},
4773 	{0xFFFF,
4774 	 RTW_PWR_CUT_ALL_MSK,
4775 	 RTW_PWR_INTF_ALL_MSK,
4776 	 0,
4777 	 RTW_PWR_CMD_END, 0, 0},
4778 };
4779 
4780 static const struct rtw_pwr_seq_cmd trans_cardemu_to_carddis_8822c[] = {
4781 	{0x0005,
4782 	 RTW_PWR_CUT_ALL_MSK,
4783 	 RTW_PWR_INTF_SDIO_MSK,
4784 	 RTW_PWR_ADDR_MAC,
4785 	 RTW_PWR_CMD_WRITE, BIT(7), BIT(7)},
4786 	{0x0007,
4787 	 RTW_PWR_CUT_ALL_MSK,
4788 	 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4789 	 RTW_PWR_ADDR_MAC,
4790 	 RTW_PWR_CMD_WRITE, 0xFF, 0x00},
4791 	{0x0067,
4792 	 RTW_PWR_CUT_ALL_MSK,
4793 	 RTW_PWR_INTF_ALL_MSK,
4794 	 RTW_PWR_ADDR_MAC,
4795 	 RTW_PWR_CMD_WRITE, BIT(5), 0},
4796 	{0x004A,
4797 	 RTW_PWR_CUT_ALL_MSK,
4798 	 RTW_PWR_INTF_USB_MSK,
4799 	 RTW_PWR_ADDR_MAC,
4800 	 RTW_PWR_CMD_WRITE, BIT(0), 0},
4801 	{0x0081,
4802 	 RTW_PWR_CUT_ALL_MSK,
4803 	 RTW_PWR_INTF_ALL_MSK,
4804 	 RTW_PWR_ADDR_MAC,
4805 	 RTW_PWR_CMD_WRITE, BIT(7) | BIT(6), 0},
4806 	{0x0090,
4807 	 RTW_PWR_CUT_ALL_MSK,
4808 	 RTW_PWR_INTF_ALL_MSK,
4809 	 RTW_PWR_ADDR_MAC,
4810 	 RTW_PWR_CMD_WRITE, BIT(1), 0},
4811 	{0x0092,
4812 	 RTW_PWR_CUT_ALL_MSK,
4813 	 RTW_PWR_INTF_PCI_MSK,
4814 	 RTW_PWR_ADDR_MAC,
4815 	 RTW_PWR_CMD_WRITE, 0xFF, 0x20},
4816 	{0x0093,
4817 	 RTW_PWR_CUT_ALL_MSK,
4818 	 RTW_PWR_INTF_PCI_MSK,
4819 	 RTW_PWR_ADDR_MAC,
4820 	 RTW_PWR_CMD_WRITE, 0xFF, 0x04},
4821 	{0x0005,
4822 	 RTW_PWR_CUT_ALL_MSK,
4823 	 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4824 	 RTW_PWR_ADDR_MAC,
4825 	 RTW_PWR_CMD_WRITE, BIT(3) | BIT(4), BIT(3)},
4826 	{0x0005,
4827 	 RTW_PWR_CUT_ALL_MSK,
4828 	 RTW_PWR_INTF_PCI_MSK,
4829 	 RTW_PWR_ADDR_MAC,
4830 	 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4831 	{0x0086,
4832 	 RTW_PWR_CUT_ALL_MSK,
4833 	 RTW_PWR_INTF_SDIO_MSK,
4834 	 RTW_PWR_ADDR_SDIO,
4835 	 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4836 	{0xFFFF,
4837 	 RTW_PWR_CUT_ALL_MSK,
4838 	 RTW_PWR_INTF_ALL_MSK,
4839 	 0,
4840 	 RTW_PWR_CMD_END, 0, 0},
4841 };
4842 
4843 static const struct rtw_pwr_seq_cmd *card_enable_flow_8822c[] = {
4844 	trans_carddis_to_cardemu_8822c,
4845 	trans_cardemu_to_act_8822c,
4846 	NULL
4847 };
4848 
4849 static const struct rtw_pwr_seq_cmd *card_disable_flow_8822c[] = {
4850 	trans_act_to_cardemu_8822c,
4851 	trans_cardemu_to_carddis_8822c,
4852 	NULL
4853 };
4854 
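/*
 * Descriptive note (editor addition): interface PHY parameter tables
 * (USB2/USB3/PCIe gen1/gen2).  Each table below holds only the 0xFFFF
 * terminator, i.e. no interface PHY fix-ups are needed for this chip.
 */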
4855 static const struct rtw_intf_phy_para usb2_param_8822c[] = {
4856 	{0xFFFF, 0x00,
4857 	 RTW_IP_SEL_PHY,
4858 	 RTW_INTF_PHY_CUT_ALL,
4859 	 RTW_INTF_PHY_PLATFORM_ALL},
4860 };
4861 
4862 static const struct rtw_intf_phy_para usb3_param_8822c[] = {
4863 	{0xFFFF, 0x0000,
4864 	 RTW_IP_SEL_PHY,
4865 	 RTW_INTF_PHY_CUT_ALL,
4866 	 RTW_INTF_PHY_PLATFORM_ALL},
4867 };
4868 
4869 static const struct rtw_intf_phy_para pcie_gen1_param_8822c[] = {
4870 	{0xFFFF, 0x0000,
4871 	 RTW_IP_SEL_PHY,
4872 	 RTW_INTF_PHY_CUT_ALL,
4873 	 RTW_INTF_PHY_PLATFORM_ALL},
4874 };
4875 
4876 static const struct rtw_intf_phy_para pcie_gen2_param_8822c[] = {
4877 	{0xFFFF, 0x0000,
4878 	 RTW_IP_SEL_PHY,
4879 	 RTW_INTF_PHY_CUT_ALL,
4880 	 RTW_INTF_PHY_PLATFORM_ALL},
4881 };
4882 
4883 static const struct rtw_intf_phy_para_table phy_para_table_8822c = {
4884 	.usb2_para	= usb2_param_8822c,
4885 	.usb3_para	= usb3_param_8822c,
4886 	.gen1_para	= pcie_gen1_param_8822c,
4887 	.gen2_para	= pcie_gen2_param_8822c,
4888 	.n_usb2_para	= ARRAY_SIZE(usb2_param_8822c),
4889 	.n_usb3_para	= ARRAY_SIZE(usb3_param_8822c),
4890 	.n_gen1_para	= ARRAY_SIZE(pcie_gen1_param_8822c),
4891 	.n_gen2_para	= ARRAY_SIZE(pcie_gen2_param_8822c),
4892 };
4893 
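/*
 * Descriptive note (editor addition): RF front-end (RFE) definitions,
 * indexed by the efuse rfe_option value.
 */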
4894 static const struct rtw_rfe_def rtw8822c_rfe_defs[] = {
4895 	[0] = RTW_DEF_RFE(8822c, 0, 0),
4896 	[1] = RTW_DEF_RFE(8822c, 0, 0),
4897 	[2] = RTW_DEF_RFE(8822c, 0, 0),
4898 	[5] = RTW_DEF_RFE(8822c, 0, 5),
4899 	[6] = RTW_DEF_RFE(8822c, 0, 0),
4900 };
4901 
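/*
 * Descriptive note (editor addition): per-path initial-gain (DIG) fields;
 * bits 6:0 of 0x1d70 for path A, bits 14:8 for path B.
 */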
4902 static const struct rtw_hw_reg rtw8822c_dig[] = {
4903 	[0] = { .addr = 0x1d70, .mask = 0x7f },
4904 	[1] = { .addr = 0x1d70, .mask = 0x7f00 },
4905 };
4906 
4907 static const struct rtw_ltecoex_addr rtw8822c_ltecoex_addr = {
4908 	.ctrl = LTECOEX_ACCESS_CTRL,
4909 	.wdata = LTECOEX_WRITE_DATA,
4910 	.rdata = LTECOEX_READ_DATA,
4911 };
4912 
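/*
 * Descriptive note (editor addition): reserved TX buffer page counts for
 * the high, normal, low, extra and GAP queues, one row per supported TX
 * DMA configuration.
 */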
4913 static const struct rtw_page_table page_table_8822c[] = {
4914 	{64, 64, 64, 64, 1},
4915 	{64, 64, 64, 64, 1},
4916 	{64, 64, 0, 0, 1},
4917 	{64, 64, 64, 0, 1},
4918 	{64, 64, 64, 64, 1},
4919 };
4920 
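/*
 * Descriptive note (editor addition): DMA priority-queue mappings for the
 * VO/VI/BE/BK, management and high TX queues; rows correspond to the
 * page_table_8822c entries above.
 */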
4921 static const struct rtw_rqpn rqpn_table_8822c[] = {
4922 	{RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4923 	 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
4924 	 RTW_DMA_MAPPING_EXTRA, RTW_DMA_MAPPING_HIGH},
4925 	{RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4926 	 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
4927 	 RTW_DMA_MAPPING_EXTRA, RTW_DMA_MAPPING_HIGH},
4928 	{RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4929 	 RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_HIGH,
4930 	 RTW_DMA_MAPPING_HIGH, RTW_DMA_MAPPING_HIGH},
4931 	{RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4932 	 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
4933 	 RTW_DMA_MAPPING_HIGH, RTW_DMA_MAPPING_HIGH},
4934 	{RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4935 	 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
4936 	 RTW_DMA_MAPPING_EXTRA, RTW_DMA_MAPPING_HIGH},
4937 };
4938 
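/*
 * Descriptive note (editor addition): reserved (.rsvd) and available
 * (.avail) page-count registers for each DMA priority queue.
 */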
4939 static struct rtw_prioq_addrs prioq_addrs_8822c = {
4940 	.prio[RTW_DMA_MAPPING_EXTRA] = {
4941 		.rsvd = REG_FIFOPAGE_INFO_4, .avail = REG_FIFOPAGE_INFO_4 + 2,
4942 	},
4943 	.prio[RTW_DMA_MAPPING_LOW] = {
4944 		.rsvd = REG_FIFOPAGE_INFO_2, .avail = REG_FIFOPAGE_INFO_2 + 2,
4945 	},
4946 	.prio[RTW_DMA_MAPPING_NORMAL] = {
4947 		.rsvd = REG_FIFOPAGE_INFO_3, .avail = REG_FIFOPAGE_INFO_3 + 2,
4948 	},
4949 	.prio[RTW_DMA_MAPPING_HIGH] = {
4950 		.rsvd = REG_FIFOPAGE_INFO_1, .avail = REG_FIFOPAGE_INFO_1 + 2,
4951 	},
4952 	.wsize = true,
4953 };
4954 
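/*
 * Descriptive note (editor addition): chip operations registered with the
 * rtw88 core.  Ops the 8822c does not implement (e.g. coex_set_ant_switch)
 * are left NULL.
 */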
4955 static struct rtw_chip_ops rtw8822c_ops = {
4956 	.phy_set_param		= rtw8822c_phy_set_param,
4957 	.read_efuse		= rtw8822c_read_efuse,
4958 	.query_rx_desc		= rtw8822c_query_rx_desc,
4959 	.set_channel		= rtw8822c_set_channel,
4960 	.mac_init		= rtw8822c_mac_init,
4961 	.dump_fw_crash		= rtw8822c_dump_fw_crash,
4962 	.read_rf		= rtw_phy_read_rf,
4963 	.write_rf		= rtw_phy_write_rf_reg_mix,
4964 	.set_tx_power_index	= rtw8822c_set_tx_power_index,
4965 	.set_antenna		= rtw8822c_set_antenna,
4966 	.cfg_ldo25		= rtw8822c_cfg_ldo25,
4967 	.false_alarm_statistics	= rtw8822c_false_alarm_statistics,
4968 	.dpk_track		= rtw8822c_dpk_track,
4969 	.phy_calibration	= rtw8822c_phy_calibration,
4970 	.cck_pd_set		= rtw8822c_phy_cck_pd_set,
4971 	.pwr_track		= rtw8822c_pwr_track,
4972 	.config_bfee		= rtw8822c_bf_config_bfee,
4973 	.set_gid_table		= rtw_bf_set_gid_table,
4974 	.cfg_csi_rate		= rtw_bf_cfg_csi_rate,
4975 	.adaptivity_init	= rtw8822c_adaptivity_init,
4976 	.adaptivity		= rtw8822c_adaptivity,
4977 	.cfo_init		= rtw8822c_cfo_init,
4978 	.cfo_track		= rtw8822c_cfo_track,
4979 	.config_tx_path		= rtw8822c_config_tx_path,
4980 	.config_txrx_mode	= rtw8822c_config_trx_mode,
4981 
4982 	.coex_set_init		= rtw8822c_coex_cfg_init,
4983 	.coex_set_ant_switch	= NULL,
4984 	.coex_set_gnt_fix	= rtw8822c_coex_cfg_gnt_fix,
4985 	.coex_set_gnt_debug	= rtw8822c_coex_cfg_gnt_debug,
4986 	.coex_set_rfe_type	= rtw8822c_coex_cfg_rfe_type,
4987 	.coex_set_wl_tx_power	= rtw8822c_coex_cfg_wl_tx_power,
4988 	.coex_set_wl_rx_gain	= rtw8822c_coex_cfg_wl_rx_gain,
4989 };
4990 
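/*
 * Descriptive note (editor addition): coexistence slot tables used by the
 * common coex code; each entry holds two 32-bit patterns for a given coex
 * case.  The _sant tables apply when WiFi and BT share one antenna, the
 * _nsant tables when they have dedicated antennas.
 */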
4991 /* Shared-Antenna Coex Table */
4992 static const struct coex_table_para table_sant_8822c[] = {
4993 	{0xffffffff, 0xffffffff}, /* case-0 */
4994 	{0x55555555, 0x55555555},
4995 	{0x66555555, 0x66555555},
4996 	{0xaaaaaaaa, 0xaaaaaaaa},
4997 	{0x5a5a5a5a, 0x5a5a5a5a},
4998 	{0xfafafafa, 0xfafafafa}, /* case-5 */
4999 	{0x6a5a5555, 0xaaaaaaaa},
5000 	{0x6a5a56aa, 0x6a5a56aa},
5001 	{0x6a5a5a5a, 0x6a5a5a5a},
5002 	{0x66555555, 0x5a5a5a5a},
5003 	{0x66555555, 0x6a5a5a5a}, /* case-10 */
5004 	{0x66555555, 0x6a5a5aaa},
5005 	{0x66555555, 0x5a5a5aaa},
5006 	{0x66555555, 0x6aaa5aaa},
5007 	{0x66555555, 0xaaaa5aaa},
5008 	{0x66555555, 0xaaaaaaaa}, /* case-15 */
5009 	{0xffff55ff, 0xfafafafa},
5010 	{0xffff55ff, 0x6afa5afa},
5011 	{0xaaffffaa, 0xfafafafa},
5012 	{0xaa5555aa, 0x5a5a5a5a},
5013 	{0xaa5555aa, 0x6a5a5a5a}, /* case-20 */
5014 	{0xaa5555aa, 0xaaaaaaaa},
5015 	{0xffffffff, 0x5a5a5a5a},
5016 	{0xffffffff, 0x5a5a5a5a},
5017 	{0xffffffff, 0x55555555},
5018 	{0xffffffff, 0x5a5a5aaa}, /* case-25 */
5019 	{0x55555555, 0x5a5a5a5a},
5020 	{0x55555555, 0xaaaaaaaa},
5021 	{0x55555555, 0x6a5a6a5a},
5022 	{0x66556655, 0x66556655},
5023 	{0x66556aaa, 0x6a5a6aaa}, /* case-30 */
5024 	{0xffffffff, 0x5aaa5aaa},
5025 	{0x56555555, 0x5a5a5aaa},
5026 	{0xdaffdaff, 0xdaffdaff},
5027 	{0xddffddff, 0xddffddff},
5028 };
5029 
5030 /* Non-Shared-Antenna Coex Table */
5031 static const struct coex_table_para table_nsant_8822c[] = {
5032 	{0xffffffff, 0xffffffff}, /* case-100 */
5033 	{0x55555555, 0x55555555},
5034 	{0x66555555, 0x66555555},
5035 	{0xaaaaaaaa, 0xaaaaaaaa},
5036 	{0x5a5a5a5a, 0x5a5a5a5a},
5037 	{0xfafafafa, 0xfafafafa}, /* case-105 */
5038 	{0x5afa5afa, 0x5afa5afa},
5039 	{0x55555555, 0xfafafafa},
5040 	{0x66555555, 0xfafafafa},
5041 	{0x66555555, 0x5a5a5a5a},
5042 	{0x66555555, 0x6a5a5a5a}, /* case-110 */
5043 	{0x66555555, 0xaaaaaaaa},
5044 	{0xffff55ff, 0xfafafafa},
5045 	{0xffff55ff, 0x5afa5afa},
5046 	{0xffff55ff, 0xaaaaaaaa},
5047 	{0xffff55ff, 0xffff55ff}, /* case-115 */
5048 	{0xaaffffaa, 0x5afa5afa},
5049 	{0xaaffffaa, 0xaaaaaaaa},
5050 	{0xffffffff, 0xfafafafa},
5051 	{0xffffffff, 0x5afa5afa},
5052 	{0xffffffff, 0xaaaaaaaa}, /* case-120 */
5053 	{0x55ff55ff, 0x5afa5afa},
5054 	{0x55ff55ff, 0xaaaaaaaa},
5055 	{0x55ff55ff, 0x55ff55ff}
5056 };
5057 
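/*
 * Descriptive note (editor addition): TDMA parameter tables; each entry is
 * the 5-byte parameter block the common coex code programs for the
 * corresponding case ID noted in the comments.
 */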
5058 /* Shared-Antenna TDMA */
5059 static const struct coex_tdma_para tdma_sant_8822c[] = {
5060 	{ {0x00, 0x00, 0x00, 0x00, 0x00} }, /* case-0 */
5061 	{ {0x61, 0x45, 0x03, 0x11, 0x11} }, /* case-1 */
5062 	{ {0x61, 0x3a, 0x03, 0x11, 0x11} },
5063 	{ {0x61, 0x30, 0x03, 0x11, 0x11} },
5064 	{ {0x61, 0x20, 0x03, 0x11, 0x11} },
5065 	{ {0x61, 0x10, 0x03, 0x11, 0x11} }, /* case-5 */
5066 	{ {0x61, 0x45, 0x03, 0x11, 0x10} },
5067 	{ {0x61, 0x3a, 0x03, 0x11, 0x10} },
5068 	{ {0x61, 0x30, 0x03, 0x11, 0x10} },
5069 	{ {0x61, 0x20, 0x03, 0x11, 0x10} },
5070 	{ {0x61, 0x10, 0x03, 0x11, 0x10} }, /* case-10 */
5071 	{ {0x61, 0x08, 0x03, 0x11, 0x14} },
5072 	{ {0x61, 0x08, 0x03, 0x10, 0x14} },
5073 	{ {0x51, 0x08, 0x03, 0x10, 0x54} },
5074 	{ {0x51, 0x08, 0x03, 0x10, 0x55} },
5075 	{ {0x51, 0x08, 0x07, 0x10, 0x54} }, /* case-15 */
5076 	{ {0x51, 0x45, 0x03, 0x10, 0x50} },
5077 	{ {0x51, 0x3a, 0x03, 0x10, 0x50} },
5078 	{ {0x51, 0x30, 0x03, 0x10, 0x50} },
5079 	{ {0x51, 0x20, 0x03, 0x10, 0x50} },
5080 	{ {0x51, 0x10, 0x03, 0x10, 0x50} }, /* case-20 */
5081 	{ {0x51, 0x4a, 0x03, 0x10, 0x50} },
5082 	{ {0x51, 0x0c, 0x03, 0x10, 0x54} },
5083 	{ {0x55, 0x08, 0x03, 0x10, 0x54} },
5084 	{ {0x65, 0x10, 0x03, 0x11, 0x10} },
5085 	{ {0x51, 0x10, 0x03, 0x10, 0x51} }, /* case-25 */
5086 	{ {0x51, 0x08, 0x03, 0x10, 0x50} },
5087 	{ {0x61, 0x08, 0x03, 0x11, 0x11} }
5088 };
5089 
5090 /* Non-Shared-Antenna TDMA */
5091 static const struct coex_tdma_para tdma_nsant_8822c[] = {
5092 	{ {0x00, 0x00, 0x00, 0x00, 0x00} }, /* case-100 */
5093 	{ {0x61, 0x45, 0x03, 0x11, 0x11} },
5094 	{ {0x61, 0x3a, 0x03, 0x11, 0x11} },
5095 	{ {0x61, 0x30, 0x03, 0x11, 0x11} },
5096 	{ {0x61, 0x20, 0x03, 0x11, 0x11} },
5097 	{ {0x61, 0x10, 0x03, 0x11, 0x11} }, /* case-105 */
5098 	{ {0x61, 0x45, 0x03, 0x11, 0x10} },
5099 	{ {0x61, 0x3a, 0x03, 0x11, 0x10} },
5100 	{ {0x61, 0x30, 0x03, 0x11, 0x10} },
5101 	{ {0x61, 0x20, 0x03, 0x11, 0x10} },
5102 	{ {0x61, 0x10, 0x03, 0x11, 0x10} }, /* case-110 */
5103 	{ {0x61, 0x08, 0x03, 0x11, 0x14} },
5104 	{ {0x61, 0x08, 0x03, 0x10, 0x14} },
5105 	{ {0x51, 0x08, 0x03, 0x10, 0x54} },
5106 	{ {0x51, 0x08, 0x03, 0x10, 0x55} },
5107 	{ {0x51, 0x08, 0x07, 0x10, 0x54} }, /* case-115 */
5108 	{ {0x51, 0x45, 0x03, 0x10, 0x50} },
5109 	{ {0x51, 0x3a, 0x03, 0x10, 0x50} },
5110 	{ {0x51, 0x30, 0x03, 0x10, 0x50} },
5111 	{ {0x51, 0x20, 0x03, 0x10, 0x50} },
5112 	{ {0x51, 0x10, 0x03, 0x10, 0x50} }, /* case-120 */
5113 	{ {0x51, 0x08, 0x03, 0x10, 0x50} }
5114 };
5115 
5116 /* rssi in percentage % (dbm = % - 100) */
5117 static const u8 wl_rssi_step_8822c[] = {60, 50, 44, 30};
5118 static const u8 bt_rssi_step_8822c[] = {8, 15, 20, 25};
5119 static const struct coex_5g_afh_map afh_5g_8822c[] = { {0, 0, 0} };
5120 
5121 /* wl_tx_dec_power, bt_tx_dec_power, wl_rx_gain, bt_rx_lna_constrain */
5122 static const struct coex_rf_para rf_para_tx_8822c[] = {
5123 	{0, 0, false, 7},  /* for normal */
5124 	{0, 16, false, 7}, /* for WL-CPT */
5125 	{8, 17, true, 4},
5126 	{7, 18, true, 4},
5127 	{6, 19, true, 4},
5128 	{5, 20, true, 4},
5129 	{0, 21, true, 4}   /* for gaming hid */
5130 };
5131 
5132 static const struct coex_rf_para rf_para_rx_8822c[] = {
5133 	{0, 0, false, 7},  /* for normal */
5134 	{0, 16, false, 7}, /* for WL-CPT */
5135 	{3, 24, true, 5},
5136 	{2, 26, true, 5},
5137 	{1, 27, true, 5},
5138 	{0, 28, true, 5},
5139 	{0, 28, true, 5}   /* for gaming hid */
5140 };
5141 
5142 static_assert(ARRAY_SIZE(rf_para_tx_8822c) == ARRAY_SIZE(rf_para_rx_8822c));
5143 
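/*
 * Descriptive note (editor addition): thermal power-tracking tables, split
 * by band (2G/5G), RF path (a/b) and modulation (CCK vs OFDM).  Each row
 * maps the absolute difference between the current thermal reading and the
 * efuse calibration value to a TX power index delta; the *_n tables are
 * used when the reading is below the calibration value, the *_p tables
 * when it is above.  The 5 GHz tables carry one row per 5G sub-band.
 */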
5144 static const u8
5145 rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
5146 	{ 0,  1,  2,  3,  5,  6,  7,  8,  9, 10,
5147 	 11, 12, 13, 14, 15, 16, 18, 19, 20, 21,
5148 	 22, 23, 24, 25, 26, 27, 28, 29, 30, 32 },
5149 	{ 0,  1,  2,  3,  5,  6,  7,  8,  9, 10,
5150 	 11, 12, 13, 14, 15, 16, 18, 19, 20, 21,
5151 	 22, 23, 24, 25, 26, 27, 28, 29, 30, 32 },
5152 	{ 0,  1,  2,  3,  5,  6,  7,  8,  9, 10,
5153 	 11, 12, 13, 14, 15, 16, 18, 19, 20, 21,
5154 	 22, 23, 24, 25, 26, 27, 28, 29, 30, 32 },
5155 };
5156 
5157 static const u8
5158 rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
5159 	{ 0,  1,  2,  3,  4,  5,  6,  7,  8,  9,
5160 	 10, 10, 11, 12, 13, 14, 15, 16, 17, 18,
5161 	 19, 20, 21, 22, 22, 23, 24, 25, 26, 27 },
5162 	{ 0,  1,  2,  3,  4,  5,  6,  7,  8,  9,
5163 	 10, 10, 11, 12, 13, 14, 15, 16, 17, 18,
5164 	 19, 20, 21, 22, 22, 23, 24, 25, 26, 27 },
5165 	{ 0,  1,  2,  3,  4,  5,  6,  7,  8,  9,
5166 	 10, 10, 11, 12, 13, 14, 15, 16, 17, 18,
5167 	 19, 20, 21, 22, 22, 23, 24, 25, 26, 27 },
5168 };
5169 
5170 static const u8
5171 rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
5172 	{ 0,  1,  2,  4,  5,  6,  7,  8,  9, 10,
5173 	 11, 13, 14, 15, 16, 17, 18, 19, 20, 21,
5174 	 23, 24, 25, 26, 27, 28, 29, 30, 31, 33 },
5175 	{ 0,  1,  2,  4,  5,  6,  7,  8,  9, 10,
5176 	 11, 13, 14, 15, 16, 17, 18, 19, 20, 21,
5177 	 23, 24, 25, 26, 27, 28, 29, 30, 31, 33 },
5178 	{ 0,  1,  2,  4,  5,  6,  7,  8,  9, 10,
5179 	 11, 13, 14, 15, 16, 17, 18, 19, 20, 21,
5180 	 23, 24, 25, 26, 27, 28, 29, 30, 31, 33 },
5181 };
5182 
5183 static const u8
5184 rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
5185 	{ 0,  1,  2,  3,  4,  5,  6,  7,  8,  9,
5186 	 10, 11, 12, 13, 14, 15, 16, 17, 18, 20,
5187 	 21, 22, 23, 24, 25, 26, 27, 28, 29, 30 },
5188 	{ 0,  1,  2,  3,  4,  5,  6,  7,  8,  9,
5189 	 10, 11, 12, 13, 14, 15, 16, 17, 18, 20,
5190 	 21, 22, 23, 24, 25, 26, 27, 28, 29, 30 },
5191 	{ 0,  1,  2,  3,  4,  5,  6,  7,  8,  9,
5192 	 10, 11, 12, 13, 14, 15, 16, 17, 18, 20,
5193 	 21, 22, 23, 24, 25, 26, 27, 28, 29, 30 },
5194 };
5195 
5196 static const u8 rtw8822c_pwrtrk_2gb_n[RTW_PWR_TRK_TBL_SZ] = {
5197 	 0,  1,  2,  3,  4,  4,  5,  6,  7,  8,
5198 	 9,  9, 10, 11, 12, 13, 14, 15, 15, 16,
5199 	17, 18, 19, 20, 20, 21, 22, 23, 24, 25
5200 };
5201 
5202 static const u8 rtw8822c_pwrtrk_2gb_p[RTW_PWR_TRK_TBL_SZ] = {
5203 	 0,  1,  2,  3,  4,  5,  6,  7,  8,  9,
5204 	10, 11, 12, 13, 14, 14, 15, 16, 17, 18,
5205 	19, 20, 21, 22, 23, 24, 25, 26, 27, 28
5206 };
5207 
5208 static const u8 rtw8822c_pwrtrk_2ga_n[RTW_PWR_TRK_TBL_SZ] = {
5209 	 0,  1,  2,  2,  3,  4,  4,  5,  6,  6,
5210 	 7,  8,  8,  9,  9, 10, 11, 11, 12, 13,
5211 	13, 14, 15, 15, 16, 17, 17, 18, 19, 19
5212 };
5213 
5214 static const u8 rtw8822c_pwrtrk_2ga_p[RTW_PWR_TRK_TBL_SZ] = {
5215 	 0,  1,  2,  3,  4,  5,  6,  7,  8,  9,
5216 	10, 11, 11, 12, 13, 14, 15, 16, 17, 18,
5217 	19, 20, 21, 22, 23, 24, 25, 25, 26, 27
5218 };
5219 
5220 static const u8 rtw8822c_pwrtrk_2g_cck_b_n[RTW_PWR_TRK_TBL_SZ] = {
5221 	 0,  1,  2,  3,  4,  5,  5,  6,  7,  8,
5222 	 9, 10, 11, 11, 12, 13, 14, 15, 16, 17,
5223 	17, 18, 19, 20, 21, 22, 23, 23, 24, 25
5224 };
5225 
5226 static const u8 rtw8822c_pwrtrk_2g_cck_b_p[RTW_PWR_TRK_TBL_SZ] = {
5227 	 0,  1,  2,  3,  4,  5,  6,  7,  8,  9,
5228 	10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
5229 	20, 21, 22, 23, 24, 25, 26, 27, 28, 29
5230 };
5231 
5232 static const u8 rtw8822c_pwrtrk_2g_cck_a_n[RTW_PWR_TRK_TBL_SZ] = {
5233 	 0,  1,  2,  3,  3,  4,  5,  6,  6,  7,
5234 	 8,  9,  9, 10, 11, 12, 12, 13, 14, 15,
5235 	15, 16, 17, 18, 18, 19, 20, 21, 21, 22
5236 };
5237 
5238 static const u8 rtw8822c_pwrtrk_2g_cck_a_p[RTW_PWR_TRK_TBL_SZ] = {
5239 	 0,  1,  2,  3,  4,  5,  5,  6,  7,  8,
5240 	 9, 10, 11, 11, 12, 13, 14, 15, 16, 17,
5241 	18, 18, 19, 20, 21, 22, 23, 24, 24, 25
5242 };
5243 
5244 static const struct rtw_pwr_track_tbl rtw8822c_rtw_pwr_track_tbl = {
5245 	.pwrtrk_5gb_n[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_1],
5246 	.pwrtrk_5gb_n[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_2],
5247 	.pwrtrk_5gb_n[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_3],
5248 	.pwrtrk_5gb_p[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_1],
5249 	.pwrtrk_5gb_p[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_2],
5250 	.pwrtrk_5gb_p[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_3],
5251 	.pwrtrk_5ga_n[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_1],
5252 	.pwrtrk_5ga_n[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_2],
5253 	.pwrtrk_5ga_n[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_3],
5254 	.pwrtrk_5ga_p[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_1],
5255 	.pwrtrk_5ga_p[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_2],
5256 	.pwrtrk_5ga_p[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_3],
5257 	.pwrtrk_2gb_n = rtw8822c_pwrtrk_2gb_n,
5258 	.pwrtrk_2gb_p = rtw8822c_pwrtrk_2gb_p,
5259 	.pwrtrk_2ga_n = rtw8822c_pwrtrk_2ga_n,
5260 	.pwrtrk_2ga_p = rtw8822c_pwrtrk_2ga_p,
5261 	.pwrtrk_2g_cckb_n = rtw8822c_pwrtrk_2g_cck_b_n,
5262 	.pwrtrk_2g_cckb_p = rtw8822c_pwrtrk_2g_cck_b_p,
5263 	.pwrtrk_2g_ccka_n = rtw8822c_pwrtrk_2g_cck_a_n,
5264 	.pwrtrk_2g_ccka_p = rtw8822c_pwrtrk_2g_cck_a_p,
5265 };
5266 
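/*
 * Descriptive note (editor addition): EDCCA threshold fields; the L2H and
 * H2L thresholds sit in bytes 2 and 3 of baseband register 0x84c, and the
 * 0x80 offset is applied by the common EDCCA helper when converting the
 * driver's signed threshold to the register encoding.
 */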
5267 static struct rtw_hw_reg_offset rtw8822c_edcca_th[] = {
5268 	[EDCCA_TH_L2H_IDX] = {
5269 		{.addr = 0x84c, .mask = MASKBYTE2}, .offset = 0x80
5270 	},
5271 	[EDCCA_TH_H2L_IDX] = {
5272 		{.addr = 0x84c, .mask = MASKBYTE3}, .offset = 0x80
5273 	},
5274 };
5275 
5276 #ifdef CONFIG_PM
5277 static const struct wiphy_wowlan_support rtw_wowlan_stub_8822c = {
5278 	.flags = WIPHY_WOWLAN_MAGIC_PKT | WIPHY_WOWLAN_GTK_REKEY_FAILURE |
5279 		 WIPHY_WOWLAN_DISCONNECT | WIPHY_WOWLAN_SUPPORTS_GTK_REKEY |
5280 		 WIPHY_WOWLAN_NET_DETECT,
5281 	.n_patterns = RTW_MAX_PATTERN_NUM,
5282 	.pattern_max_len = RTW_MAX_PATTERN_SIZE,
5283 	.pattern_min_len = 1,
5284 	.max_nd_match_sets = 4,
5285 };
5286 #endif
5287 
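/*
 * Descriptive note (editor addition): registers dumped by the coex
 * debug/info output; the {0, 0, RTW_REG_DOMAIN_NL} entries act as line
 * breaks in that dump.
 */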
5288 static const struct rtw_reg_domain coex_info_hw_regs_8822c[] = {
5289 	{0x1860, BIT(3), RTW_REG_DOMAIN_MAC8},
5290 	{0x4160, BIT(3), RTW_REG_DOMAIN_MAC8},
5291 	{0x1c32, BIT(6), RTW_REG_DOMAIN_MAC8},
5292 	{0x1c38, BIT(28), RTW_REG_DOMAIN_MAC32},
5293 	{0, 0, RTW_REG_DOMAIN_NL},
5294 	{0x430, MASKDWORD, RTW_REG_DOMAIN_MAC32},
5295 	{0x434, MASKDWORD, RTW_REG_DOMAIN_MAC32},
5296 	{0x42a, MASKLWORD, RTW_REG_DOMAIN_MAC16},
5297 	{0x426, MASKBYTE0, RTW_REG_DOMAIN_MAC8},
5298 	{0x45e, BIT(3), RTW_REG_DOMAIN_MAC8},
5299 	{0x454, MASKLWORD, RTW_REG_DOMAIN_MAC16},
5300 	{0, 0, RTW_REG_DOMAIN_NL},
5301 	{0x4c, BIT(24) | BIT(23), RTW_REG_DOMAIN_MAC32},
5302 	{0x64, BIT(0), RTW_REG_DOMAIN_MAC8},
5303 	{0x4c6, BIT(4), RTW_REG_DOMAIN_MAC8},
5304 	{0x40, BIT(5), RTW_REG_DOMAIN_MAC8},
5305 	{0x1, RFREG_MASK, RTW_REG_DOMAIN_RF_B},
5306 	{0, 0, RTW_REG_DOMAIN_NL},
5307 	{0x550, MASKDWORD, RTW_REG_DOMAIN_MAC32},
5308 	{0x522, MASKBYTE0, RTW_REG_DOMAIN_MAC8},
5309 	{0x953, BIT(1), RTW_REG_DOMAIN_MAC8},
5310 	{0xc50, MASKBYTE0, RTW_REG_DOMAIN_MAC8},
5311 };
5312 
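/*
 * Descriptive note (editor addition): chip descriptor exported to the
 * rtw88 core, gathering the ops, firmware names, buffer/queue geometry,
 * PHY/RF tables, power-tracking and coex parameters defined above.
 */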
5313 const struct rtw_chip_info rtw8822c_hw_spec = {
5314 	.ops = &rtw8822c_ops,
5315 	.id = RTW_CHIP_TYPE_8822C,
5316 	.fw_name = "rtw88/rtw8822c_fw.bin",
5317 	.wlan_cpu = RTW_WCPU_11AC,
5318 	.tx_pkt_desc_sz = 48,
5319 	.tx_buf_desc_sz = 16,
5320 	.rx_pkt_desc_sz = 24,
5321 	.rx_buf_desc_sz = 8,
5322 	.phy_efuse_size = 512,
5323 	.log_efuse_size = 768,
5324 	.ptct_efuse_size = 124,
5325 	.txff_size = 262144,
5326 	.rxff_size = 24576,
5327 	.fw_rxff_size = 12288,
5328 	.txgi_factor = 2,
5329 	.is_pwr_by_rate_dec = false,
5330 	.max_power_index = 0x7f,
5331 	.csi_buf_pg_num = 50,
5332 	.band = RTW_BAND_2G | RTW_BAND_5G,
5333 	.page_size = TX_PAGE_SIZE,
5334 	.dig_min = 0x20,
5335 	.default_1ss_tx_path = BB_PATH_A,
5336 	.path_div_supported = true,
5337 	.ht_supported = true,
5338 	.vht_supported = true,
5339 	.lps_deep_mode_supported = BIT(LPS_DEEP_MODE_LCLK) | BIT(LPS_DEEP_MODE_PG),
5340 	.sys_func_en = 0xD8,
5341 	.pwr_on_seq = card_enable_flow_8822c,
5342 	.pwr_off_seq = card_disable_flow_8822c,
5343 	.page_table = page_table_8822c,
5344 	.rqpn_table = rqpn_table_8822c,
5345 	.prioq_addrs = &prioq_addrs_8822c,
5346 	.intf_table = &phy_para_table_8822c,
5347 	.dig = rtw8822c_dig,
5348 	.dig_cck = NULL,
5349 	.rf_base_addr = {0x3c00, 0x4c00},
5350 	.rf_sipi_addr = {0x1808, 0x4108},
5351 	.ltecoex_addr = &rtw8822c_ltecoex_addr,
5352 	.mac_tbl = &rtw8822c_mac_tbl,
5353 	.agc_tbl = &rtw8822c_agc_tbl,
5354 	.bb_tbl = &rtw8822c_bb_tbl,
5355 	.rfk_init_tbl = &rtw8822c_array_mp_cal_init_tbl,
5356 	.rf_tbl = {&rtw8822c_rf_b_tbl, &rtw8822c_rf_a_tbl},
5357 	.rfe_defs = rtw8822c_rfe_defs,
5358 	.rfe_defs_size = ARRAY_SIZE(rtw8822c_rfe_defs),
5359 	.en_dis_dpd = true,
5360 	.dpd_ratemask = DIS_DPD_RATEALL,
5361 	.pwr_track_tbl = &rtw8822c_rtw_pwr_track_tbl,
5362 	.iqk_threshold = 8,
5363 	.lck_threshold = 8,
5364 	.bfer_su_max_num = 2,
5365 	.bfer_mu_max_num = 1,
5366 	.rx_ldpc = true,
5367 	.tx_stbc = true,
5368 	.edcca_th = rtw8822c_edcca_th,
5369 	.l2h_th_ini_cs = 60,
5370 	.l2h_th_ini_ad = 45,
5371 	.ampdu_density = IEEE80211_HT_MPDU_DENSITY_2,
5372 
5373 #ifdef CONFIG_PM
5374 	.wow_fw_name = "rtw88/rtw8822c_wow_fw.bin",
5375 	.wowlan_stub = &rtw_wowlan_stub_8822c,
5376 	.max_sched_scan_ssids = 4,
5377 #endif
5378 	.max_scan_ie_len = (RTW_PROBE_PG_CNT - 1) * TX_PAGE_SIZE,
5379 	.coex_para_ver = 0x22020720,
5380 	.bt_desired_ver = 0x20,
5381 	.scbd_support = true,
5382 	.new_scbd10_def = true,
5383 	.ble_hid_profile_support = true,
5384 	.wl_mimo_ps_support = true,
5385 	.pstdma_type = COEX_PSTDMA_FORCE_LPSOFF,
5386 	.bt_rssi_type = COEX_BTRSSI_DBM,
5387 	.ant_isolation = 15,
5388 	.rssi_tolerance = 2,
5389 	.wl_rssi_step = wl_rssi_step_8822c,
5390 	.bt_rssi_step = bt_rssi_step_8822c,
5391 	.table_sant_num = ARRAY_SIZE(table_sant_8822c),
5392 	.table_sant = table_sant_8822c,
5393 	.table_nsant_num = ARRAY_SIZE(table_nsant_8822c),
5394 	.table_nsant = table_nsant_8822c,
5395 	.tdma_sant_num = ARRAY_SIZE(tdma_sant_8822c),
5396 	.tdma_sant = tdma_sant_8822c,
5397 	.tdma_nsant_num = ARRAY_SIZE(tdma_nsant_8822c),
5398 	.tdma_nsant = tdma_nsant_8822c,
5399 	.wl_rf_para_num = ARRAY_SIZE(rf_para_tx_8822c),
5400 	.wl_rf_para_tx = rf_para_tx_8822c,
5401 	.wl_rf_para_rx = rf_para_rx_8822c,
5402 	.bt_afh_span_bw20 = 0x24,
5403 	.bt_afh_span_bw40 = 0x36,
5404 	.afh_5g_num = ARRAY_SIZE(afh_5g_8822c),
5405 	.afh_5g = afh_5g_8822c,
5406 
5407 	.coex_info_hw_regs_num = ARRAY_SIZE(coex_info_hw_regs_8822c),
5408 	.coex_info_hw_regs = coex_info_hw_regs_8822c,
5409 
5410 	.fw_fifo_addr = {0x780, 0x700, 0x780, 0x660, 0x650, 0x680},
5411 	.fwcd_segs = &rtw8822c_fwcd_segs,
5412 };
5413 EXPORT_SYMBOL(rtw8822c_hw_spec);
5414 
5415 MODULE_FIRMWARE("rtw88/rtw8822c_fw.bin");
5416 MODULE_FIRMWARE("rtw88/rtw8822c_wow_fw.bin");
5417 
5418 MODULE_AUTHOR("Realtek Corporation");
5419 MODULE_DESCRIPTION("Realtek 802.11ac wireless 8822c driver");
5420 MODULE_LICENSE("Dual BSD/GPL");
5421