1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) Marvell International Ltd. and its affiliates
4  */
5 
6 #include "ddr3_init.h"
7 #include "mv_ddr_training_db.h"
8 #include "mv_ddr_regs.h"
9 
10 u8 is_reg_dump = 0;
11 u8 debug_pbs = DEBUG_LEVEL_ERROR;
12 
13 /*
14  * API to change debug log verbosity flags from outside the library
15  */
16 #if defined(SILENT_LIB)
17 void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
18 {
19 	/* do nothing */
20 }
21 #else /* SILENT_LIB */
22 /* Debug flags for other Training modules */
23 u8 debug_training_static = DEBUG_LEVEL_ERROR;
24 u8 debug_training = DEBUG_LEVEL_ERROR;
25 u8 debug_leveling = DEBUG_LEVEL_ERROR;
26 u8 debug_centralization = DEBUG_LEVEL_ERROR;
27 u8 debug_training_ip = DEBUG_LEVEL_ERROR;
28 u8 debug_training_bist = DEBUG_LEVEL_ERROR;
29 u8 debug_training_hw_alg = DEBUG_LEVEL_ERROR;
30 u8 debug_training_access = DEBUG_LEVEL_ERROR;
31 u8 debug_training_device = DEBUG_LEVEL_ERROR;
32 
33 
34 void mv_ddr_user_log_level_set(enum ddr_lib_debug_block block)
35 {
36 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
37 	ddr3_hws_set_log_level(block, tm->debug_level);
38 }
39 
40 void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
41 {
42 	switch (block) {
43 	case DEBUG_BLOCK_STATIC:
44 		debug_training_static = level;
45 		break;
46 	case DEBUG_BLOCK_TRAINING_MAIN:
47 		debug_training = level;
48 		break;
49 	case DEBUG_BLOCK_LEVELING:
50 		debug_leveling = level;
51 		break;
52 	case DEBUG_BLOCK_CENTRALIZATION:
53 		debug_centralization = level;
54 		break;
55 	case DEBUG_BLOCK_PBS:
56 		debug_pbs = level;
57 		break;
58 	case DEBUG_BLOCK_ALG:
59 		debug_training_hw_alg = level;
60 		break;
61 	case DEBUG_BLOCK_DEVICE:
62 		debug_training_device = level;
63 		break;
64 	case DEBUG_BLOCK_ACCESS:
65 		debug_training_access = level;
66 		break;
67 	case DEBUG_STAGES_REG_DUMP:
68 		if (level == DEBUG_LEVEL_TRACE)
69 			is_reg_dump = 1;
70 		else
71 			is_reg_dump = 0;
72 		break;
73 	case DEBUG_BLOCK_ALL:
74 	default:
75 		debug_training_static = level;
76 		debug_training = level;
77 		debug_leveling = level;
78 		debug_centralization = level;
79 		debug_pbs = level;
80 		debug_training_hw_alg = level;
81 		debug_training_access = level;
82 		debug_training_device = level;
83 	}
84 }
85 #endif /* SILENT_LIB */
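/*
 * Example: to trace a single training stage from outside the library, a
 * caller can do, e.g.,
 *	ddr3_hws_set_log_level(DEBUG_BLOCK_LEVELING, DEBUG_LEVEL_TRACE);
 * or pass DEBUG_BLOCK_ALL to raise every block at once. When SILENT_LIB is
 * defined the call is a no-op.
 */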
86 
87 #if defined(DDR_VIEWER_TOOL)
88 static char *convert_freq(enum mv_ddr_freq freq);
89 #if defined(EXCLUDE_SWITCH_DEBUG)
90 u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
91 u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
92 u32 ctrl_adll1[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
93 u32 ctrl_level_phase[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
94 #endif /* EXCLUDE_SWITCH_DEBUG */
95 #endif /* DDR_VIEWER_TOOL */
96 
97 struct hws_tip_config_func_db config_func_info[MAX_DEVICE_NUM];
98 u8 is_default_centralization = 0;
99 u8 is_tune_result = 0;
100 u8 is_validate_window_per_if = 0;
101 u8 is_validate_window_per_pup = 0;
102 u8 sweep_cnt = 1;
103 u32 is_bist_reset_bit = 1;
104 u8 is_run_leveling_sweep_tests;
105 
106 static struct hws_xsb_info xsb_info[MAX_DEVICE_NUM];
107 
108 /*
109  * Dump Dunit & Phy registers
110  */
111 int ddr3_tip_reg_dump(u32 dev_num)
112 {
113 	u32 if_id, reg_addr, data_value, bus_id;
114 	u32 read_data[MAX_INTERFACE_NUM];
115 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
116 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
117 
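	/*
	 * The dump covers Dunit registers 0x1400-0x19ec (per active
	 * interface) followed by PHY data and control registers 0x00-0xff
	 * (per active interface and bus).
	 */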
118 	printf("-- dunit registers --\n");
119 	for (reg_addr = 0x1400; reg_addr < 0x19f0; reg_addr += 4) {
120 		printf("0x%x ", reg_addr);
121 		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
122 			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
123 			CHECK_STATUS(ddr3_tip_if_read
124 				     (dev_num, ACCESS_TYPE_UNICAST,
125 				      if_id, reg_addr, read_data,
126 				      MASK_ALL_BITS));
127 			printf("0x%x ", read_data[if_id]);
128 		}
129 		printf("\n");
130 	}
131 
132 	printf("-- Phy registers --\n");
133 	for (reg_addr = 0; reg_addr <= 0xff; reg_addr++) {
134 		printf("0x%x ", reg_addr);
135 		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
136 			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
137 			for (bus_id = 0;
138 			     bus_id < octets_per_if_num;
139 			     bus_id++) {
140 				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
141 				CHECK_STATUS(ddr3_tip_bus_read
142 					     (dev_num, if_id,
143 					      ACCESS_TYPE_UNICAST, bus_id,
144 					      DDR_PHY_DATA, reg_addr,
145 					      &data_value));
146 				printf("0x%x ", data_value);
147 			}
148 			for (bus_id = 0;
149 			     bus_id < octets_per_if_num;
150 			     bus_id++) {
151 				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
152 				CHECK_STATUS(ddr3_tip_bus_read
153 					     (dev_num, if_id,
154 					      ACCESS_TYPE_UNICAST, bus_id,
155 					      DDR_PHY_CONTROL, reg_addr,
156 					      &data_value));
157 				printf("0x%x ", data_value);
158 			}
159 		}
160 		printf("\n");
161 	}
162 
163 	return MV_OK;
164 }
165 
166 /*
167  * Register access func registration
168  */
169 int ddr3_tip_init_config_func(u32 dev_num,
170 			      struct hws_tip_config_func_db *config_func)
171 {
172 	if (config_func == NULL)
173 		return MV_BAD_PARAM;
174 
175 	memcpy(&config_func_info[dev_num], config_func,
176 	       sizeof(struct hws_tip_config_func_db));
177 
178 	return MV_OK;
179 }
180 
181 /*
182  * Get training result info pointer
183  */
184 enum hws_result *ddr3_tip_get_result_ptr(u32 stage)
185 {
186 	return training_result[stage];
187 }
188 
189 /*
190  * Device info read
191  */
192 int ddr3_tip_get_device_info(u32 dev_num, struct ddr3_device_info *info_ptr)
193 {
194 	if (config_func_info[dev_num].tip_get_device_info_func != NULL) {
195 		return config_func_info[dev_num].
196 			tip_get_device_info_func((u8) dev_num, info_ptr);
197 	}
198 
199 	return MV_FAIL;
200 }
201 
202 #if defined(DDR_VIEWER_TOOL)
203 /*
204  * Convert freq to character string
205  */
206 static char *convert_freq(enum mv_ddr_freq freq)
207 {
208 	switch (freq) {
209 	case MV_DDR_FREQ_LOW_FREQ:
210 		return "MV_DDR_FREQ_LOW_FREQ";
211 
212 	case MV_DDR_FREQ_400:
213 		return "400";
214 
215 	case MV_DDR_FREQ_533:
216 		return "533";
217 
218 	case MV_DDR_FREQ_667:
219 		return "667";
220 
221 	case MV_DDR_FREQ_800:
222 		return "800";
223 
224 	case MV_DDR_FREQ_933:
225 		return "933";
226 
227 	case MV_DDR_FREQ_1066:
228 		return "1066";
229 
230 	case MV_DDR_FREQ_311:
231 		return "311";
232 
233 	case MV_DDR_FREQ_333:
234 		return "333";
235 
236 	case MV_DDR_FREQ_467:
237 		return "467";
238 
239 	case MV_DDR_FREQ_850:
240 		return "850";
241 
242 	case MV_DDR_FREQ_900:
243 		return "900";
244 
245 	case MV_DDR_FREQ_360:
246 		return "MV_DDR_FREQ_360";
247 
248 	case MV_DDR_FREQ_1000:
249 		return "MV_DDR_FREQ_1000";
250 
251 	default:
252 		return "Unknown Frequency";
253 	}
254 }
255 
256 /*
257  * Convert device ID to character string
258  */
259 static char *convert_dev_id(u32 dev_id)
260 {
261 	switch (dev_id) {
262 	case 0x6800:
263 		return "A38xx";
264 	case 0x6900:
265 		return "A39XX";
266 	case 0xf400:
267 		return "AC3";
268 	case 0xfc00:
269 		return "BC2";
270 
271 	default:
272 		return "Unknown Device";
273 	}
274 }
275 
276 /*
277  * Convert memory size to character string
278  */
279 static char *convert_mem_size(u32 dev_id)
280 {
281 	switch (dev_id) {
282 	case 0:
283 		return "512 MB";
284 	case 1:
285 		return "1 GB";
286 	case 2:
287 		return "2 GB";
288 	case 3:
289 		return "4 GB";
290 	case 4:
291 		return "8 GB";
292 
293 	default:
294 		return "wrong mem size";
295 	}
296 }
297 
298 int print_device_info(u8 dev_num)
299 {
300 	struct ddr3_device_info info_ptr;
301 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
302 
303 	CHECK_STATUS(ddr3_tip_get_device_info(dev_num, &info_ptr));
304 	printf("=== DDR setup START===\n");
305 	printf("\tDevice ID: %s\n", convert_dev_id(info_ptr.device_id));
306 	printf("\tDDR3  CK delay: %d\n", info_ptr.ck_delay);
307 	print_topology(tm);
308 	printf("=== DDR setup END===\n");
309 
310 	return MV_OK;
311 }
312 
313 void hws_ddr3_tip_sweep_test(int enable)
314 {
315 	if (enable) {
316 		is_validate_window_per_if = 1;
317 		is_validate_window_per_pup = 1;
318 		debug_training = DEBUG_LEVEL_TRACE;
319 	} else {
320 		is_validate_window_per_if = 0;
321 		is_validate_window_per_pup = 0;
322 	}
323 }
324 #endif /* DDR_VIEWER_TOOL */
325 
326 char *ddr3_tip_convert_tune_result(enum hws_result tune_result)
327 {
328 	switch (tune_result) {
329 	case TEST_FAILED:
330 		return "FAILED";
331 	case TEST_SUCCESS:
332 		return "PASS";
333 	case NO_TEST_DONE:
334 		return "NOT COMPLETED";
335 	default:
336 		return "UNKNOWN";
337 	}
338 }
339 
340 /*
341  * Print log info
342  */
343 int ddr3_tip_print_log(u32 dev_num, u32 mem_addr)
344 {
345 	u32 if_id = 0;
346 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
347 
348 #if defined(DDR_VIEWER_TOOL)
349 	if ((is_validate_window_per_if != 0) ||
350 	    (is_validate_window_per_pup != 0)) {
351 		u32 is_pup_log = 0;
352 		enum mv_ddr_freq freq;
353 
354 		freq = tm->interface_params[first_active_if].memory_freq;
355 
356 		is_pup_log = (is_validate_window_per_pup != 0) ? 1 : 0;
357 		printf("===VALIDATE WINDOW LOG START===\n");
358 		printf("DDR Frequency: %s   ======\n", convert_freq(freq));
359 		/* print sweep windows */
360 		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
361 		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
362 #if defined(EXCLUDE_SWITCH_DEBUG)
363 		if (is_run_leveling_sweep_tests == 1) {
364 			ddr3_tip_run_leveling_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
365 			ddr3_tip_run_leveling_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
366 		}
367 #endif /* EXCLUDE_SWITCH_DEBUG */
368 		ddr3_tip_print_all_pbs_result(dev_num);
369 		ddr3_tip_print_wl_supp_result(dev_num);
370 		printf("===VALIDATE WINDOW LOG END ===\n");
371 		CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));
372 		ddr3_tip_reg_dump(dev_num);
373 	}
374 #endif /* DDR_VIEWER_TOOL */
375 
376 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
377 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
378 
379 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
380 				  ("IF %d Status:\n", if_id));
381 
382 		if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
383 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
384 					  ("\tInit Controller: %s\n",
385 					   ddr3_tip_convert_tune_result
386 					   (training_result[INIT_CONTROLLER]
387 					    [if_id])));
388 		}
389 		if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) {
390 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
391 					  ("\tLow freq Config: %s\n",
392 					   ddr3_tip_convert_tune_result
393 					   (training_result[SET_LOW_FREQ]
394 					    [if_id])));
395 		}
396 		if (mask_tune_func & LOAD_PATTERN_MASK_BIT) {
397 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
398 					  ("\tLoad Pattern: %s\n",
399 					   ddr3_tip_convert_tune_result
400 					   (training_result[LOAD_PATTERN]
401 					    [if_id])));
402 		}
403 		if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) {
404 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
405 					  ("\tMedium freq Config: %s\n",
406 					   ddr3_tip_convert_tune_result
407 					   (training_result[SET_MEDIUM_FREQ]
408 					    [if_id])));
409 		}
410 		if (mask_tune_func & WRITE_LEVELING_MASK_BIT) {
411 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
412 					  ("\tWL: %s\n",
413 					   ddr3_tip_convert_tune_result
414 					   (training_result[WRITE_LEVELING]
415 					    [if_id])));
416 		}
417 		if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) {
418 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
419 					  ("\tLoad Pattern: %s\n",
420 					   ddr3_tip_convert_tune_result
421 					   (training_result[LOAD_PATTERN_2]
422 					    [if_id])));
423 		}
424 		if (mask_tune_func & READ_LEVELING_MASK_BIT) {
425 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
426 					  ("\tRL: %s\n",
427 					   ddr3_tip_convert_tune_result
428 					   (training_result[READ_LEVELING]
429 					    [if_id])));
430 		}
431 		if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
432 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
433 					  ("\tWL Supp: %s\n",
434 					   ddr3_tip_convert_tune_result
435 					   (training_result[WRITE_LEVELING_SUPP]
436 					    [if_id])));
437 		}
438 		if (mask_tune_func & PBS_RX_MASK_BIT) {
439 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
440 					  ("\tPBS RX: %s\n",
441 					   ddr3_tip_convert_tune_result
442 					   (training_result[PBS_RX]
443 					    [if_id])));
444 		}
445 		if (mask_tune_func & PBS_TX_MASK_BIT) {
446 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
447 					  ("\tPBS TX: %s\n",
448 					   ddr3_tip_convert_tune_result
449 					   (training_result[PBS_TX]
450 					    [if_id])));
451 		}
452 		if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
453 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
454 					  ("\tTarget freq Config: %s\n",
455 					   ddr3_tip_convert_tune_result
456 					   (training_result[SET_TARGET_FREQ]
457 					    [if_id])));
458 		}
459 		if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
460 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
461 					  ("\tWL TF: %s\n",
462 					   ddr3_tip_convert_tune_result
463 					   (training_result[WRITE_LEVELING_TF]
464 					    [if_id])));
465 		}
466 		if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
467 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
468 					  ("\tRL TF: %s\n",
469 					   ddr3_tip_convert_tune_result
470 					   (training_result[READ_LEVELING_TF]
471 					    [if_id])));
472 		}
473 		if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
474 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
475 					  ("\tWL TF Supp: %s\n",
476 					   ddr3_tip_convert_tune_result
477 					   (training_result
478 					    [WRITE_LEVELING_SUPP_TF]
479 					    [if_id])));
480 		}
481 		if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
482 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
483 					  ("\tCentr RX: %s\n",
484 					   ddr3_tip_convert_tune_result
485 					   (training_result[CENTRALIZATION_RX]
486 					    [if_id])));
487 		}
488 		if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
489 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
490 					  ("\tVREF_CALIBRATION: %s\n",
491 					   ddr3_tip_convert_tune_result
492 					   (training_result[VREF_CALIBRATION]
493 					    [if_id])));
494 		}
495 		if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
496 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
497 					  ("\tCentr TX: %s\n",
498 					   ddr3_tip_convert_tune_result
499 					   (training_result[CENTRALIZATION_TX]
500 					    [if_id])));
501 		}
502 	}
503 
504 	return MV_OK;
505 }
506 
507 #if !defined(EXCLUDE_DEBUG_PRINTS)
508 /*
509  * Print stability log info
510  */
511 int ddr3_tip_print_stability_log(u32 dev_num)
512 {
513 	u8 if_id = 0, csindex = 0, bus_id = 0, idx = 0;
514 	u32 reg_data;
515 	u32 read_data[MAX_INTERFACE_NUM];
516 	unsigned int max_cs = mv_ddr_cs_num_get();
517 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
518 
519 	/* Title print */
520 	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
521 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
522 		printf("Title: I/F# , Tj, Calibration_n0, Calibration_p0, Calibration_n1, Calibration_p1, Calibration_n2, Calibration_p2,");
523 		for (csindex = 0; csindex < max_cs; csindex++) {
524 			printf("CS%d , ", csindex);
525 			printf("\n");
526 			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
527 			printf("VWTx, VWRx, WL_tot, WL_ADLL, WL_PH, RL_Tot, RL_ADLL, RL_PH, RL_Smp, Cen_tx, Cen_rx, Vref, DQVref,");
528 			printf("\t\t");
529 			for (idx = 0; idx < 11; idx++)
530 				printf("PBSTx-Pad%d,", idx);
531 			printf("\t\t");
532 			for (idx = 0; idx < 11; idx++)
533 				printf("PBSRx-Pad%d,", idx);
534 		}
535 	}
536 	printf("\n");
537 
538 	/* Data print */
539 	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
540 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
541 
542 		printf("Data: %d,%d,", if_id,
543 		       (config_func_info[dev_num].tip_get_temperature != NULL)
544 		       ? (config_func_info[dev_num].
545 			  tip_get_temperature(dev_num)) : (0));
546 
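		/*
		 * The three reads below provide the Calibration_n/p pairs
		 * named in the title row, two bit fields per register.
		 */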
547 		CHECK_STATUS(ddr3_tip_if_read
548 			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x14c8,
549 			      read_data, MASK_ALL_BITS));
550 		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
551 		       ((read_data[if_id] & 0xfc00) >> 10));
552 		CHECK_STATUS(ddr3_tip_if_read
553 			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x17c8,
554 			      read_data, MASK_ALL_BITS));
555 		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
556 		       ((read_data[if_id] & 0xfc00) >> 10));
557 		CHECK_STATUS(ddr3_tip_if_read
558 			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1dc8,
559 			      read_data, MASK_ALL_BITS));
560 		printf("%d,%d,", ((read_data[if_id] & 0x3f0000) >> 16),
561 		       ((read_data[if_id] & 0xfc00000) >> 22));
562 
563 		for (csindex = 0; csindex < max_cs; csindex++) {
564 			printf("CS%d , ", csindex);
565 			for (bus_id = 0; bus_id < MAX_BUS_NUM; bus_id++) {
566 				printf("\n");
567 				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
568 				ddr3_tip_bus_read(dev_num, if_id,
569 						  ACCESS_TYPE_UNICAST,
570 						  bus_id, DDR_PHY_DATA,
571 						  RESULT_PHY_REG +
572 						  csindex, &reg_data);
573 				printf("%d,%d,", (reg_data & 0x1f),
574 				       ((reg_data & 0x3e0) >> 5));
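				/*
				 * WL/RL decode: bits [4:0] hold the ADLL step
				 * and bits [8:6] the phase, so the printed
				 * total is ADLL + 32 * phase (plus 64 * the
				 * read-sample delay for RL).
				 */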
575 				/* WL */
576 				ddr3_tip_bus_read(dev_num, if_id,
577 						  ACCESS_TYPE_UNICAST,
578 						  bus_id, DDR_PHY_DATA,
579 						  WL_PHY_REG(csindex),
580 						  &reg_data);
581 				printf("%d,%d,%d,",
582 				       (reg_data & 0x1f) +
583 				       ((reg_data & 0x1c0) >> 6) * 32,
584 				       (reg_data & 0x1f),
585 				       (reg_data & 0x1c0) >> 6);
586 				/* RL */
587 				CHECK_STATUS(ddr3_tip_if_read
588 					     (dev_num, ACCESS_TYPE_UNICAST,
589 					      if_id,
590 					      RD_DATA_SMPL_DLYS_REG,
591 					      read_data, MASK_ALL_BITS));
592 				read_data[if_id] =
593 					(read_data[if_id] &
594 					 (0x1f << (8 * csindex))) >>
595 					(8 * csindex);
596 				ddr3_tip_bus_read(dev_num, if_id,
597 						  ACCESS_TYPE_UNICAST, bus_id,
598 						  DDR_PHY_DATA,
599 						  RL_PHY_REG(csindex),
600 						  &reg_data);
601 				printf("%d,%d,%d,%d,",
602 				       (reg_data & 0x1f) +
603 				       ((reg_data & 0x1c0) >> 6) * 32 +
604 				       read_data[if_id] * 64,
605 				       (reg_data & 0x1f),
606 				       ((reg_data & 0x1c0) >> 6),
607 				       read_data[if_id]);
608 				/* Centralization */
609 				ddr3_tip_bus_read(dev_num, if_id,
610 						  ACCESS_TYPE_UNICAST, bus_id,
611 						  DDR_PHY_DATA,
612 						  CTX_PHY_REG(csindex),
613 						  &reg_data);
614 				printf("%d,", (reg_data & 0x3f));
615 				ddr3_tip_bus_read(dev_num, if_id,
616 						  ACCESS_TYPE_UNICAST, bus_id,
617 						  DDR_PHY_DATA,
618 						  CRX_PHY_REG(csindex),
619 						   &reg_data);
620 				printf("%d,", (reg_data & 0x1f));
621 				/* Vref */
622 				ddr3_tip_bus_read(dev_num, if_id,
623 						  ACCESS_TYPE_UNICAST, bus_id,
624 						  DDR_PHY_DATA,
625 						  PAD_CFG_PHY_REG,
626 						  &reg_data);
627 				printf("%d,", (reg_data & 0x7));
628 				/* DQVref */
629 				/* Need to add the Read Function from device */
630 				printf("%d,", 0);
631 				printf("\t\t");
632 				for (idx = 0; idx < 11; idx++) {
633 					ddr3_tip_bus_read(dev_num, if_id,
634 							  ACCESS_TYPE_UNICAST,
635 							  bus_id, DDR_PHY_DATA,
636 							  0x10 +
637 							  16 * csindex +
638 							  idx, &reg_data);
639 					printf("%d,", (reg_data & 0x3f));
640 				}
641 				printf("\t\t");
642 				for (idx = 0; idx < 11; idx++) {
643 					ddr3_tip_bus_read(dev_num, if_id,
644 							  ACCESS_TYPE_UNICAST,
645 							  bus_id, DDR_PHY_DATA,
646 							  0x50 +
647 							  16 * csindex +
648 							  idx, &reg_data);
649 					printf("%d,", (reg_data & 0x3f));
650 				}
651 			}
652 		}
653 	}
654 	printf("\n");
655 
656 	return MV_OK;
657 }
658 #endif /* EXCLUDE_DEBUG_PRINTS */
659 
660 /*
661  * Register XSB information
662  */
663 int ddr3_tip_register_xsb_info(u32 dev_num, struct hws_xsb_info *xsb_info_table)
664 {
665 	memcpy(&xsb_info[dev_num], xsb_info_table, sizeof(struct hws_xsb_info));
666 	return MV_OK;
667 }
668 
669 /*
670  * Read ADLL Value
671  */
672 int ddr3_tip_read_adll_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
673 			     u32 reg_addr, u32 mask)
674 {
675 	u32 data_value;
676 	u32 if_id = 0, bus_id = 0;
677 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
678 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
679 
680 	/*
681 	 * multi CS support - reg_addr is calculated in calling function
682 	 * with CS offset
683 	 */
684 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
685 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
686 		for (bus_id = 0; bus_id < octets_per_if_num;
687 		     bus_id++) {
688 			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
689 			CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
690 						       ACCESS_TYPE_UNICAST,
691 						       bus_id,
692 						       DDR_PHY_DATA, reg_addr,
693 						       &data_value));
694 			pup_values[if_id *
695 				   octets_per_if_num + bus_id] =
696 				data_value & mask;
697 		}
698 	}
699 
700 	return 0;
701 }
702 
703 /*
704  * Write ADLL Value
705  */
706 int ddr3_tip_write_adll_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
707 			      u32 reg_addr)
708 {
709 	u32 if_id = 0, bus_id = 0;
710 	u32 data;
711 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
712 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
713 
714 	/*
715 	 * multi CS support - reg_addr is calculated in calling function
716 	 * with CS offset
717 	 */
718 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
719 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
720 		for (bus_id = 0; bus_id < octets_per_if_num;
721 		     bus_id++) {
722 			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
723 			data = pup_values[if_id *
724 					  octets_per_if_num +
725 					  bus_id];
726 			CHECK_STATUS(ddr3_tip_bus_write(dev_num,
727 							ACCESS_TYPE_UNICAST,
728 							if_id,
729 							ACCESS_TYPE_UNICAST,
730 							bus_id, DDR_PHY_DATA,
731 							reg_addr, data));
732 		}
733 	}
734 
735 	return 0;
736 }
737 
738 /**
739  * Read Phase Value
740  */
741 int read_phase_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
742 		     int reg_addr, u32 mask)
743 {
744 	u32  data_value;
745 	u32 if_id = 0, bus_id = 0;
746 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
747 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
748 
749 	/* multi CS support - reg_addr is calculated in calling function with CS offset */
750 	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
751 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
752 		for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) {
753 			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
754 			CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
755 						       ACCESS_TYPE_UNICAST,
756 						       bus_id,
757 						       DDR_PHY_DATA, reg_addr,
758 						       &data_value));
759 			pup_values[if_id * octets_per_if_num + bus_id] = data_value & mask;
760 		}
761 	}
762 
763 	return 0;
764 }
765 
766 /**
767  * Write Leveling Value
768  */
769 int write_leveling_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
770 			 u32 pup_ph_values[MAX_INTERFACE_NUM * MAX_BUS_NUM], int reg_addr)
771 {
772 	u32 if_id = 0, bus_id = 0;
773 	u32 data;
774 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
775 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
776 
777 	/* multi CS support - reg_addr is calculated in calling function with CS offset */
778 	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
779 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
780 		for (bus_id = 0 ; bus_id < octets_per_if_num ; bus_id++) {
781 			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
782 			data = pup_values[if_id * octets_per_if_num + bus_id] +
783 			       pup_ph_values[if_id * octets_per_if_num + bus_id];
784 			CHECK_STATUS(ddr3_tip_bus_write(dev_num,
785 							ACCESS_TYPE_UNICAST,
786 							if_id,
787 							ACCESS_TYPE_UNICAST,
788 							bus_id,
789 							DDR_PHY_DATA,
790 							reg_addr,
791 							data));
792 		}
793 	}
794 
795 	return 0;
796 }
797 
798 #if !defined(EXCLUDE_SWITCH_DEBUG)
799 struct hws_tip_config_func_db config_func_info[MAX_DEVICE_NUM];
800 u32 start_xsb_offset = 0;
801 u8 is_rl_old = 0;
802 u8 is_freq_old = 0;
803 u8 is_dfs_disabled = 0;
804 u32 default_centrlization_value = 0x12;
805 u32 activate_select_before_run_alg = 1, activate_deselect_after_run_alg = 1,
806 	rl_test = 0, reset_read_fifo = 0;
807 int debug_acc = 0;
808 u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
809 u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
810 
811 u32 xsb_test_table[][8] = {
812 	{0x00000000, 0x11111111, 0x22222222, 0x33333333, 0x44444444, 0x55555555,
813 	 0x66666666, 0x77777777},
814 	{0x88888888, 0x99999999, 0xaaaaaaaa, 0xbbbbbbbb, 0xcccccccc, 0xdddddddd,
815 	 0xeeeeeeee, 0xffffffff},
816 	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
817 	 0x00000000, 0xffffffff},
818 	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
819 	 0x00000000, 0xffffffff},
820 	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
821 	 0x00000000, 0xffffffff},
822 	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
823 	 0x00000000, 0xffffffff},
824 	{0x00000000, 0x00000000, 0xffffffff, 0xffffffff, 0x00000000, 0x00000000,
825 	 0xffffffff, 0xffffffff},
826 	{0x00000000, 0x00000000, 0x00000000, 0xffffffff, 0x00000000, 0x00000000,
827 	 0x00000000, 0x00000000},
828 	{0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, 0xffffffff,
829 	 0xffffffff, 0xffffffff}
830 };
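/*
 * Data patterns for run_xsb_test(); each row holds the eight 32-bit words
 * written and read back as one test burst.
 */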
831 
832 int ddr3_tip_print_adll(void)
833 {
834 	u32 bus_cnt = 0, if_id, data_p1, data_p2, ui_data3, dev_num = 0;
835 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
836 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
837 
838 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
839 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
840 		for (bus_cnt = 0; bus_cnt < octets_per_if_num;
841 		     bus_cnt++) {
842 			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
843 			CHECK_STATUS(ddr3_tip_bus_read
844 				     (dev_num, if_id,
845 				      ACCESS_TYPE_UNICAST, bus_cnt,
846 				      DDR_PHY_DATA, 0x1, &data_p1));
847 			CHECK_STATUS(ddr3_tip_bus_read
848 				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
849 				      bus_cnt, DDR_PHY_DATA, 0x2, &data_p2));
850 			CHECK_STATUS(ddr3_tip_bus_read
851 				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
852 				      bus_cnt, DDR_PHY_DATA, 0x3, &ui_data3));
853 			DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
854 					  (" IF %d bus_cnt %d  phy_reg_1_data 0x%x phy_reg_2_data 0x%x phy_reg_3_data 0x%x\n",
855 					   if_id, bus_cnt, data_p1, data_p2,
856 					   ui_data3));
857 		}
858 	}
859 
860 	return MV_OK;
861 }
862 
863 #endif /* EXCLUDE_SWITCH_DEBUG */
864 
865 #if defined(DDR_VIEWER_TOOL)
866 /*
867  * Print ADLL
868  */
869 int print_adll(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
870 {
871 	u32 i, j;
872 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
873 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
874 
875 	for (j = 0; j < octets_per_if_num; j++) {
876 		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, j);
877 		for (i = 0; i < MAX_INTERFACE_NUM; i++)
878 			printf("%d ,", adll[i * octets_per_if_num + j]);
879 	}
880 	printf("\n");
881 
882 	return MV_OK;
883 }
884 
885 int print_ph(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
886 {
887 	u32 i, j;
888 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
889 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
890 
891 	for (j = 0; j < octets_per_if_num; j++) {
892 		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, j);
893 		for (i = 0; i < MAX_INTERFACE_NUM; i++)
894 			printf("%d ,", adll[i * octets_per_if_num + j] >> 6);
895 	}
896 	printf("\n");
897 
898 	return MV_OK;
899 }
900 #endif /* DDR_VIEWER_TOOL */
901 
902 #if !defined(EXCLUDE_SWITCH_DEBUG)
903 /* byte_index - test only byte 0, 1, 2, or 3; 0xff - test all bytes */
904 static u32 ddr3_tip_compare(u32 if_id, u32 *p_src, u32 *p_dst,
905 			    u32 byte_index)
906 {
907 	u32 burst_cnt = 0, addr_offset, i_id;
908 	int b_is_fail = 0;
909 
910 	addr_offset =
911 		(byte_index ==
912 		 0xff) ? (u32) 0xffffffff : (u32) (0xff << (byte_index * 8));
913 	for (burst_cnt = 0; burst_cnt < EXT_ACCESS_BURST_LENGTH; burst_cnt++) {
914 		if ((p_src[burst_cnt] & addr_offset) !=
915 		    (p_dst[if_id] & addr_offset))
916 			b_is_fail = 1;
917 	}
918 
919 	if (b_is_fail == 1) {
920 		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
921 				  ("IF %d exp: ", if_id));
922 		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
923 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
924 					  ("0x%8x ", p_src[i_id]));
925 		}
926 		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
927 				  ("\n_i_f %d rcv: ", if_id));
928 		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
929 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
930 					  ("(0x%8x ", p_dst[i_id]));
931 		}
932 		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, ("\n "));
933 	}
934 
935 	return b_is_fail;
936 }
937 #endif /* EXCLUDE_SWITCH_DEBUG */
938 
939 #if defined(DDR_VIEWER_TOOL)
940 /*
941  * Sweep validation
942  */
943 int ddr3_tip_run_sweep_test(int dev_num, u32 repeat_num, u32 direction,
944 			    u32 mode)
945 {
946 	u32 pup = 0, start_pup = 0, end_pup = 0;
947 	u32 adll = 0, rep = 0, pattern_idx = 0;
948 	u32 res[MAX_INTERFACE_NUM] = { 0 };
949 	int if_id = 0;
950 	u32 adll_value = 0;
951 	u32 reg;
952 	enum hws_access_type pup_access;
953 	u32 cs;
954 	unsigned int max_cs = mv_ddr_cs_num_get();
955 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
956 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
957 
958 	repeat_num = 2;
959 
960 	if (mode == 1) {
961 		/* per pup */
962 		start_pup = 0;
963 		end_pup = octets_per_if_num - 1;
964 		pup_access = ACCESS_TYPE_UNICAST;
965 	} else {
966 		start_pup = 0;
967 		end_pup = 0;
968 		pup_access = ACCESS_TYPE_MULTICAST;
969 	}
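	/*
	 * mode == 1 sweeps each pup separately via unicast access; otherwise
	 * a single multicast pass covers all pups at once.
	 */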
970 
971 	for (cs = 0; cs < max_cs; cs++) {
972 		reg = (direction == 0) ? CTX_PHY_REG(cs) : CRX_PHY_REG(cs);
973 		for (adll = 0; adll < ADLL_LENGTH; adll++) {
974 			for (if_id = 0;
975 			     if_id <= MAX_INTERFACE_NUM - 1;
976 			     if_id++) {
977 				VALIDATE_IF_ACTIVE
978 					(tm->if_act_mask,
979 					 if_id);
980 				for (pup = start_pup; pup <= end_pup; pup++) {
981 					ctrl_sweepres[adll][if_id][pup] =
982 						0;
983 				}
984 			}
985 		}
986 
987 		for (adll = 0; adll < (MAX_INTERFACE_NUM * MAX_BUS_NUM); adll++)
988 			ctrl_adll[adll] = 0;
989 		/* Save DQS value (after algorithm run) */
990 		ddr3_tip_read_adll_value(dev_num, ctrl_adll,
991 					 reg, MASK_ALL_BITS);
992 
993 		/*
994 		 * Sweep ADLL from 0 to 31 on all I/Fs and all pups and
995 		 * perform BIST on each stage.
996 		 */
997 		for (pup = start_pup; pup <= end_pup; pup++) {
998 			for (adll = 0; adll < ADLL_LENGTH; adll++) {
999 				for (rep = 0; rep < repeat_num; rep++) {
1000 					for (pattern_idx = PATTERN_KILLER_DQ0;
1001 					     pattern_idx < PATTERN_LAST;
1002 					     pattern_idx++) {
1003 						adll_value =
1004 							(direction == 0) ? (adll * 2) : adll;
1005 						CHECK_STATUS(ddr3_tip_bus_write
1006 							     (dev_num, ACCESS_TYPE_MULTICAST, 0,
1007 							      pup_access, pup, DDR_PHY_DATA,
1008 							      reg, adll_value));
1009 						hws_ddr3_run_bist(dev_num, sweep_pattern, res,
1010 								  cs);
1011 						/* ddr3_tip_reset_fifo_ptr(dev_num); */
1012 						for (if_id = 0;
1013 						     if_id < MAX_INTERFACE_NUM;
1014 						     if_id++) {
1015 							VALIDATE_IF_ACTIVE
1016 								(tm->if_act_mask,
1017 								 if_id);
1018 							ctrl_sweepres[adll][if_id][pup]
1019 								+= res[if_id];
1020 							if (mode == 1) {
1021 								CHECK_STATUS
1022 									(ddr3_tip_bus_write
1023 									 (dev_num,
1024 									  ACCESS_TYPE_UNICAST,
1025 									  if_id,
1026 									  ACCESS_TYPE_UNICAST,
1027 									  pup,
1028 									  DDR_PHY_DATA,
1029 									  reg,
1030 									  ctrl_adll[if_id *
1031 										    cs *
1032 										    octets_per_if_num
1033 										    + pup]));
1034 							}
1035 						}
1036 					}
1037 				}
1038 			}
1039 		}
1040 		printf("Final, CS %d,%s, Sweep, Result, Adll,", cs,
1041 		       ((direction == 0) ? "TX" : "RX"));
1042 		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1043 			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1044 			if (mode == 1) {
1045 				for (pup = start_pup; pup <= end_pup; pup++) {
1046 					VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
1047 					printf("I/F%d-PHY%d , ", if_id, pup);
1048 				}
1049 			} else {
1050 				printf("I/F%d , ", if_id);
1051 			}
1052 		}
1053 		printf("\n");
1054 
1055 		for (adll = 0; adll < ADLL_LENGTH; adll++) {
1056 			adll_value = (direction == 0) ? (adll * 2) : adll;
1057 			printf("Final,%s, Sweep, Result, %d ,",
1058 			       ((direction == 0) ? "TX" : "RX"), adll_value);
1059 
1060 			for (if_id = 0;
1061 			     if_id <= MAX_INTERFACE_NUM - 1;
1062 			     if_id++) {
1063 				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1064 				for (pup = start_pup; pup <= end_pup; pup++) {
1065 					printf("%8d , ",
1066 					       ctrl_sweepres[adll][if_id]
1067 					       [pup]);
1068 				}
1069 			}
1070 			printf("\n");
1071 		}
1072 
1073 		/*
1074 		 * Write back to the PHY the Rx DQS value that we stored at
1075 		 * the beginning.
1076 		 */
1077 		ddr3_tip_write_adll_value(dev_num, ctrl_adll, reg);
1078 		/* print adll results */
1079 		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, MASK_ALL_BITS);
1080 		printf("%s, DQS, ADLL,,,", (direction == 0) ? "Tx" : "Rx");
1081 		print_adll(dev_num, ctrl_adll);
1082 	}
1083 	ddr3_tip_reset_fifo_ptr(dev_num);
1084 
1085 	return 0;
1086 }
1087 
1088 #if defined(EXCLUDE_SWITCH_DEBUG)
1089 int ddr3_tip_run_leveling_sweep_test(int dev_num, u32 repeat_num,
1090 				     u32 direction, u32 mode)
1091 {
1092 	u32 pup = 0, start_pup = 0, end_pup = 0, start_adll = 0;
1093 	u32 adll = 0, rep = 0, pattern_idx = 0;
1094 	u32 read_data[MAX_INTERFACE_NUM];
1095 	u32 res[MAX_INTERFACE_NUM] = { 0 };
1096 	int if_id = 0, gap = 0;
1097 	u32 adll_value = 0;
1098 	u32 reg;
1099 	enum hws_access_type pup_access;
1100 	u32 cs;
1101 	unsigned int max_cs = mv_ddr_cs_num_get();
1102 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1103 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1104 
1105 	if (mode == 1) { /* per pup */
1106 		start_pup = 0;
1107 		end_pup = octets_per_if_num - 1;
1108 		pup_access = ACCESS_TYPE_UNICAST;
1109 	} else {
1110 		start_pup = 0;
1111 		end_pup = 0;
1112 		pup_access = ACCESS_TYPE_MULTICAST;
1113 	}
1114 
1115 	for (cs = 0; cs < max_cs; cs++) {
1116 		reg = (direction == 0) ? WL_PHY_REG(cs) : RL_PHY_REG(cs);
1117 		for (adll = 0; adll < ADLL_LENGTH; adll++) {
1118 			for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
1119 				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1120 				for (pup = start_pup; pup <= end_pup; pup++)
1121 					ctrl_sweepres[adll][if_id][pup] = 0;
1122 			}
1123 		}
1124 
1125 		for (adll = 0; adll < MAX_INTERFACE_NUM * MAX_BUS_NUM; adll++) {
1126 			ctrl_adll[adll] = 0;
1127 			ctrl_level_phase[adll] = 0;
1128 			ctrl_adll1[adll] = 0;
1129 		}
1130 
1131 		/* save leveling value after running algorithm */
1132 		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, 0x1f);
1133 		read_phase_value(dev_num, ctrl_level_phase, reg, 0x7 << 6);
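		/*
		 * ctrl_adll[] keeps the 5-bit ADLL field and
		 * ctrl_level_phase[] the 3-bit phase field; both are restored
		 * by write_leveling_value() once the sweep is done.
		 */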
1134 
1135 		if (direction == 0)
1136 			ddr3_tip_read_adll_value(dev_num, ctrl_adll1,
1137 						 CTX_PHY_REG(cs), MASK_ALL_BITS);
1138 
1139 		/* Sweep ADLL from 0 to 31 on all interfaces, all pups,
1140 		 * and perform BIST on each stage
1141 		 */
1142 		for (pup = start_pup; pup <= end_pup; pup++) {
1143 			for (adll = 0; adll < ADLL_LENGTH; adll++) {
1144 				for (rep = 0; rep < repeat_num; rep++) {
1145 					adll_value = (direction == 0) ? (adll * 2) : (adll * 3);
1146 					for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1147 						start_adll = ctrl_adll[if_id * cs * octets_per_if_num + pup] +
1148 							     (ctrl_level_phase[if_id * cs *
1149 									     octets_per_if_num +
1150 									     pup] >> 6) * 32;
1151 
1152 						if (direction == 0)
1153 							start_adll = (start_adll > 32) ? (start_adll - 32) : 0;
1154 						else
1155 							start_adll = (start_adll > 48) ? (start_adll - 48) : 0;
1156 
1157 						adll_value += start_adll;
1158 
1159 						gap = ctrl_adll1[if_id * cs * octets_per_if_num + pup] -
1160 						      ctrl_adll[if_id * cs * octets_per_if_num + pup];
1161 						gap = (((adll_value % 32) + gap) % 64);
1162 
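						/*
						 * Re-encode the linear delay:
						 * adll_value % 32 stays in the
						 * low bits and the 32-step
						 * phase count moves up to
						 * bits [8:6].
						 */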
1163 						adll_value = ((adll_value % 32) +
1164 							       (((adll_value - (adll_value % 32)) / 32) << 6));
1165 
1166 						CHECK_STATUS(ddr3_tip_bus_write(dev_num,
1167 										ACCESS_TYPE_UNICAST,
1168 										if_id,
1169 										pup_access,
1170 										pup,
1171 										DDR_PHY_DATA,
1172 										reg,
1173 										adll_value));
1174 						if (direction == 0)
1175 							CHECK_STATUS(ddr3_tip_bus_write(dev_num,
1176 											ACCESS_TYPE_UNICAST,
1177 											if_id,
1178 											pup_access,
1179 											pup,
1180 											DDR_PHY_DATA,
1181 											CTX_PHY_REG(cs),
1182 											gap));
1183 					}
1184 
1185 					for (pattern_idx = PATTERN_KILLER_DQ0;
1186 					     pattern_idx < PATTERN_LAST;
1187 					     pattern_idx++) {
1188 						hws_ddr3_run_bist(dev_num, sweep_pattern, res, cs);
1189 						ddr3_tip_reset_fifo_ptr(dev_num);
1190 						for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1191 							VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1192 							if (pup != 4) { /* TODO: remove literal */
1193 								ctrl_sweepres[adll][if_id][pup] += res[if_id];
1194 							} else {
1195 								CHECK_STATUS(ddr3_tip_if_read(dev_num,
1196 											      ACCESS_TYPE_UNICAST,
1197 											      if_id,
1198 											      0x1458,
1199 											      read_data,
1200 											      MASK_ALL_BITS));
1201 								ctrl_sweepres[adll][if_id][pup] += read_data[if_id];
1202 								CHECK_STATUS(ddr3_tip_if_write(dev_num,
1203 											       ACCESS_TYPE_UNICAST,
1204 											       if_id,
1205 											       0x1458,
1206 											       0x0,
1207 											       0xFFFFFFFF));
1208 								CHECK_STATUS(ddr3_tip_if_write(dev_num,
1209 											       ACCESS_TYPE_UNICAST,
1210 											       if_id,
1211 											       0x145C,
1212 											       0x0,
1213 											       0xFFFFFFFF));
1214 							}
1215 						}
1216 					}
1217 				}
1218 			}
1219 
1220 			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1221 				start_adll = ctrl_adll[if_id * cs * octets_per_if_num + pup] +
1222 					     ctrl_level_phase[if_id * cs * octets_per_if_num + pup];
1223 				CHECK_STATUS(ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id, pup_access, pup,
1224 								DDR_PHY_DATA, reg, start_adll));
1225 				if (direction == 0)
1226 					CHECK_STATUS(ddr3_tip_bus_write(dev_num,
1227 									ACCESS_TYPE_UNICAST,
1228 									if_id,
1229 									pup_access,
1230 									pup,
1231 									DDR_PHY_DATA,
1232 									CTX_PHY_REG(cs),
1233 									ctrl_adll1[if_id *
1234 										   cs *
1235 										   octets_per_if_num +
1236 										   pup]));
1237 			}
1238 		}
1239 
1240 		printf("Final,CS %d,%s,Leveling,Result,Adll,", cs, ((direction == 0) ? "TX" : "RX"));
1241 
1242 		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1243 			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1244 			if (mode == 1) {
1245 				for (pup = start_pup; pup <= end_pup; pup++) {
1246 					VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
1247 					printf("I/F%d-PHY%d , ", if_id, pup);
1248 				}
1249 			} else {
1250 				printf("I/F%d , ", if_id);
1251 			}
1252 		}
1253 		printf("\n");
1254 
1255 		for (adll = 0; adll < ADLL_LENGTH; adll++) {
1256 			adll_value = (direction == 0) ? ((adll * 2) - 32) : ((adll * 3) - 48);
1257 			printf("Final,%s,LevelingSweep,Result, %d ,", ((direction == 0) ? "TX" : "RX"), adll_value);
1258 
1259 			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1260 				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1261 				for (pup = start_pup; pup <= end_pup; pup++)
1262 					printf("%8d , ", ctrl_sweepres[adll][if_id][pup]);
1263 			}
1264 			printf("\n");
1265 		}
1266 
1267 		/* write back to the PHY the Rx DQS value that we stored at the beginning */
1268 		write_leveling_value(dev_num, ctrl_adll, ctrl_level_phase, reg);
1269 		if (direction == 0)
1270 			ddr3_tip_write_adll_value(dev_num, ctrl_adll1, CTX_PHY_REG(cs));
1271 
1272 		/* print adll results */
1273 		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, MASK_ALL_BITS);
1274 		printf("%s,DQS,Leveling,,,", (direction == 0) ? "Tx" : "Rx");
1275 		print_adll(dev_num, ctrl_adll);
1276 		print_ph(dev_num, ctrl_level_phase);
1277 	}
1278 	ddr3_tip_reset_fifo_ptr(dev_num);
1279 
1280 	return 0;
1281 }
1282 #endif /* EXCLUDE_SWITCH_DEBUG */
1283 
1284 void print_topology(struct mv_ddr_topology_map *topology_db)
1285 {
1286 	u32 ui, uj;
1287 	u32 dev_num = 0;
1288 
1289 	printf("\tinterface_mask: 0x%x\n", topology_db->if_act_mask);
1290 	printf("\tNumber of buses: 0x%x\n",
1291 	       ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE));
1292 	printf("\tbus_act_mask: 0x%x\n", topology_db->bus_act_mask);
1293 
1294 	for (ui = 0; ui < MAX_INTERFACE_NUM; ui++) {
1295 		VALIDATE_IF_ACTIVE(topology_db->if_act_mask, ui);
1296 		printf("\n\tInterface ID: %d\n", ui);
1297 		printf("\t\tDDR Frequency: %s\n",
1298 		       convert_freq(topology_db->
1299 				    interface_params[ui].memory_freq));
1300 		printf("\t\tSpeed_bin: %d\n",
1301 		       topology_db->interface_params[ui].speed_bin_index);
1302 		printf("\t\tBus_width: %d\n",
1303 		       (4 << topology_db->interface_params[ui].bus_width));
1304 		printf("\t\tMem_size: %s\n",
1305 		       convert_mem_size(topology_db->
1306 					interface_params[ui].memory_size));
1307 		printf("\t\tCAS-WL: %d\n",
1308 		       topology_db->interface_params[ui].cas_wl);
1309 		printf("\t\tCAS-L: %d\n",
1310 		       topology_db->interface_params[ui].cas_l);
1311 		printf("\t\tTemperature: %d\n",
1312 		       topology_db->interface_params[ui].interface_temp);
1313 		printf("\n");
1314 		for (uj = 0; uj < 4; uj++) {
1315 			printf("\t\tBus %d parameters- CS Mask: 0x%x\t", uj,
1316 			       topology_db->interface_params[ui].
1317 			       as_bus_params[uj].cs_bitmask);
1318 			printf("Mirror: 0x%x\t",
1319 			       topology_db->interface_params[ui].
1320 			       as_bus_params[uj].mirror_enable_bitmask);
1321 			printf("DQS Swap is %s \t",
1322 			       (topology_db->
1323 				interface_params[ui].as_bus_params[uj].
1324 				is_dqs_swap == 1) ? "enabled" : "disabled");
1325 			printf("Ck Swap:%s\t",
1326 			       (topology_db->
1327 				interface_params[ui].as_bus_params[uj].
1328 				is_ck_swap == 1) ? "enabled" : "disabled");
1329 			printf("\n");
1330 		}
1331 	}
1332 }
1333 #endif /* DDR_VIEWER_TOOL */
1334 
1335 #if !defined(EXCLUDE_SWITCH_DEBUG)
1336 /*
1337  * Execute XSB Test transaction (rd/wr/both)
1338  */
1339 int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
1340 		 u32 read_type, u32 burst_length)
1341 {
1342 	u32 seq = 0, if_id = 0, addr, cnt;
1343 	int ret = MV_OK, ret_tmp;
1344 	u32 data_read[MAX_INTERFACE_NUM];
1345 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1346 
1347 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1348 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1349 		addr = mem_addr;
1350 		for (cnt = 0; cnt <= burst_length; cnt++) {
1351 			seq = (seq + 1) % 8;
1352 			if (write_type != 0) {
1353 				CHECK_STATUS(ddr3_tip_ext_write
1354 					     (dev_num, if_id, addr, 1,
1355 					      xsb_test_table[seq]));
1356 			}
1357 			if (read_type != 0) {
1358 				CHECK_STATUS(ddr3_tip_ext_read
1359 					     (dev_num, if_id, addr, 1,
1360 					      data_read));
1361 			}
1362 			if ((read_type != 0) && (write_type != 0)) {
1363 				ret_tmp =
1364 					ddr3_tip_compare(if_id,
1365 							 xsb_test_table[seq],
1366 							 data_read,
1367 							 0xff);
1368 				addr += (EXT_ACCESS_BURST_LENGTH * 4);
1369 				ret = (ret != MV_OK) ? ret : ret_tmp;
1370 			}
1371 		}
1372 	}
1373 
1374 	return ret;
1375 }
1376 
1377 #else /* EXCLUDE_SWITCH_DEBUG */
1378 u32 start_xsb_offset = 0;
1379 
1380 int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
1381 		 u32 read_type, u32 burst_length)
1382 {
1383 	return MV_OK;
1384 }
1385 
1386 #endif /* EXCLUDE_SWITCH_DEBUG */
1387