1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) Marvell International Ltd. and its affiliates
4  */
5 
6 #include "ddr3_init.h"
7 
8 u8 is_reg_dump = 0;
9 u8 debug_pbs = DEBUG_LEVEL_ERROR;
10 
11 /*
12  * API to change flags outside of the lib
13  */
14 #if defined(SILENT_LIB)
15 void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
16 {
17 	/* do nothing */
18 }
19 #else /* SILENT_LIB */
20 /* Debug flags for other Training modules */
21 u8 debug_training_static = DEBUG_LEVEL_ERROR;
22 u8 debug_training = DEBUG_LEVEL_ERROR;
23 u8 debug_leveling = DEBUG_LEVEL_ERROR;
24 u8 debug_centralization = DEBUG_LEVEL_ERROR;
25 u8 debug_training_ip = DEBUG_LEVEL_ERROR;
26 u8 debug_training_bist = DEBUG_LEVEL_ERROR;
27 u8 debug_training_hw_alg = DEBUG_LEVEL_ERROR;
28 u8 debug_training_access = DEBUG_LEVEL_ERROR;
29 u8 debug_training_device = DEBUG_LEVEL_ERROR;
30 
31 
32 void mv_ddr_user_log_level_set(enum ddr_lib_debug_block block)
33 {
34 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
35 	ddr3_hws_set_log_level(block, tm->debug_level);
36 }
37 
38 void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
39 {
40 	switch (block) {
41 	case DEBUG_BLOCK_STATIC:
42 		debug_training_static = level;
43 		break;
44 	case DEBUG_BLOCK_TRAINING_MAIN:
45 		debug_training = level;
46 		break;
47 	case DEBUG_BLOCK_LEVELING:
48 		debug_leveling = level;
49 		break;
50 	case DEBUG_BLOCK_CENTRALIZATION:
51 		debug_centralization = level;
52 		break;
53 	case DEBUG_BLOCK_PBS:
54 		debug_pbs = level;
55 		break;
56 	case DEBUG_BLOCK_ALG:
57 		debug_training_hw_alg = level;
58 		break;
59 	case DEBUG_BLOCK_DEVICE:
60 		debug_training_device = level;
61 		break;
62 	case DEBUG_BLOCK_ACCESS:
63 		debug_training_access = level;
64 		break;
65 	case DEBUG_STAGES_REG_DUMP:
66 		if (level == DEBUG_LEVEL_TRACE)
67 			is_reg_dump = 1;
68 		else
69 			is_reg_dump = 0;
70 		break;
71 	case DEBUG_BLOCK_ALL:
72 	default:
73 		debug_training_static = level;
74 		debug_training = level;
75 		debug_leveling = level;
76 		debug_centralization = level;
77 		debug_pbs = level;
78 		debug_training_hw_alg = level;
79 		debug_training_access = level;
80 		debug_training_device = level;
81 	}
82 }
83 #endif /* SILENT_LIB */
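
/*
 * Minimal usage sketch, assuming the caller runs after the DDR library is
 * up: raise one training block to trace verbosity, or push the topology's
 * user-configured debug level to every block at once.
 *
 *	ddr3_hws_set_log_level(DEBUG_BLOCK_PBS, DEBUG_LEVEL_TRACE);
 *	mv_ddr_user_log_level_set(DEBUG_BLOCK_ALL);
 */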
84 
85 #if defined(DDR_VIEWER_TOOL)
86 static char *convert_freq(enum hws_ddr_freq freq);
87 #if defined(EXCLUDE_SWITCH_DEBUG)
88 u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
89 u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
90 u32 ctrl_adll1[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
91 u32 ctrl_level_phase[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
92 #endif /* EXCLUDE_SWITCH_DEBUG */
93 #endif /* DDR_VIEWER_TOOL */
94 
95 struct hws_tip_config_func_db config_func_info[MAX_DEVICE_NUM];
96 u8 is_default_centralization = 0;
97 u8 is_tune_result = 0;
98 u8 is_validate_window_per_if = 0;
99 u8 is_validate_window_per_pup = 0;
100 u8 sweep_cnt = 1;
101 u32 is_bist_reset_bit = 1;
102 u8 is_run_leveling_sweep_tests;
103 
104 static struct hws_xsb_info xsb_info[MAX_DEVICE_NUM];
105 
106 /*
107  * Dump Dunit & Phy registers
108  */
109 int ddr3_tip_reg_dump(u32 dev_num)
110 {
111 	u32 if_id, reg_addr, data_value, bus_id;
112 	u32 read_data[MAX_INTERFACE_NUM];
113 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
114 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
115 
116 	printf("-- dunit registers --\n");
117 	for (reg_addr = 0x1400; reg_addr < 0x19f0; reg_addr += 4) {
118 		printf("0x%x ", reg_addr);
119 		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
120 			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
121 			CHECK_STATUS(ddr3_tip_if_read
122 				     (dev_num, ACCESS_TYPE_UNICAST,
123 				      if_id, reg_addr, read_data,
124 				      MASK_ALL_BITS));
125 			printf("0x%x ", read_data[if_id]);
126 		}
127 		printf("\n");
128 	}
129 
130 	printf("-- Phy registers --\n");
131 	for (reg_addr = 0; reg_addr <= 0xff; reg_addr++) {
132 		printf("0x%x ", reg_addr);
133 		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
134 			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
135 			for (bus_id = 0;
136 			     bus_id < octets_per_if_num;
137 			     bus_id++) {
138 				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
139 				CHECK_STATUS(ddr3_tip_bus_read
140 					     (dev_num, if_id,
141 					      ACCESS_TYPE_UNICAST, bus_id,
142 					      DDR_PHY_DATA, reg_addr,
143 					      &data_value));
144 				printf("0x%x ", data_value);
145 			}
146 			for (bus_id = 0;
147 			     bus_id < octets_per_if_num;
148 			     bus_id++) {
149 				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
150 				CHECK_STATUS(ddr3_tip_bus_read
151 					     (dev_num, if_id,
152 					      ACCESS_TYPE_UNICAST, bus_id,
153 					      DDR_PHY_CONTROL, reg_addr,
154 					      &data_value));
155 				printf("0x%x ", data_value);
156 			}
157 		}
158 		printf("\n");
159 	}
160 
161 	return MV_OK;
162 }
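
/*
 * Minimal usage sketch, assuming device number 0: dump every D-unit and
 * PHY register after training for offline inspection.
 *
 *	ddr3_tip_reg_dump(0);
 */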
163 
164 /*
165  * Register access func registration
166  */
167 int ddr3_tip_init_config_func(u32 dev_num,
168 			      struct hws_tip_config_func_db *config_func)
169 {
170 	if (config_func == NULL)
171 		return MV_BAD_PARAM;
172 
173 	memcpy(&config_func_info[dev_num], config_func,
174 	       sizeof(struct hws_tip_config_func_db));
175 
176 	return MV_OK;
177 }
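
/*
 * Registration sketch, assuming a platform that supplies its own access
 * callbacks; my_get_device_info and my_get_temperature are hypothetical
 * names, and only struct fields referenced elsewhere in this file are shown.
 *
 *	struct hws_tip_config_func_db funcs = {
 *		.tip_get_device_info_func = my_get_device_info,
 *		.tip_get_temperature = my_get_temperature,
 *	};
 *
 *	ddr3_tip_init_config_func(0, &funcs);
 */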
178 
179 /*
180  * Read training result table
181  */
182 int hws_ddr3_tip_read_training_result(
183 	u32 dev_num, enum hws_result result[MAX_STAGE_LIMIT][MAX_INTERFACE_NUM])
184 {
185 	if (result == NULL)
186 		return MV_BAD_PARAM;
187 
188 	memcpy(result, training_result,
189 	       sizeof(enum hws_result) *
190 	       MAX_STAGE_LIMIT *
191 	       MAX_INTERFACE_NUM);
192 
193 	return MV_OK;
194 }
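
/*
 * Usage sketch, assuming device 0: copy the training result table and
 * report one stage through ddr3_tip_convert_tune_result().
 *
 *	enum hws_result res[MAX_STAGE_LIMIT][MAX_INTERFACE_NUM];
 *
 *	if (hws_ddr3_tip_read_training_result(0, res) == MV_OK)
 *		printf("WL on IF 0: %s\n",
 *		       ddr3_tip_convert_tune_result(res[WRITE_LEVELING][0]));
 */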
195 
196 /*
197  * Get training result info pointer
198  */
199 enum hws_result *ddr3_tip_get_result_ptr(u32 stage)
200 {
201 	return training_result[stage];
202 }
203 
204 /*
205  * Device info read
206  */
207 int ddr3_tip_get_device_info(u32 dev_num, struct ddr3_device_info *info_ptr)
208 {
209 	if (config_func_info[dev_num].tip_get_device_info_func != NULL) {
210 		return config_func_info[dev_num].
211 			tip_get_device_info_func((u8) dev_num, info_ptr);
212 	}
213 
214 	return MV_FAIL;
215 }
216 
217 #if defined(DDR_VIEWER_TOOL)
218 /*
219  * Convert freq to character string
220  */
221 static char *convert_freq(enum hws_ddr_freq freq)
222 {
223 	switch (freq) {
224 	case DDR_FREQ_LOW_FREQ:
225 		return "DDR_FREQ_LOW_FREQ";
226 
227 	case DDR_FREQ_400:
228 		return "400";
229 
230 	case DDR_FREQ_533:
231 		return "533";
232 
233 	case DDR_FREQ_667:
234 		return "667";
235 
236 	case DDR_FREQ_800:
237 		return "800";
238 
239 	case DDR_FREQ_933:
240 		return "933";
241 
242 	case DDR_FREQ_1066:
243 		return "1066";
244 
245 	case DDR_FREQ_311:
246 		return "311";
247 
248 	case DDR_FREQ_333:
249 		return "333";
250 
251 	case DDR_FREQ_467:
252 		return "467";
253 
254 	case DDR_FREQ_850:
255 		return "850";
256 
257 	case DDR_FREQ_900:
258 		return "900";
259 
260 	case DDR_FREQ_360:
261 		return "DDR_FREQ_360";
262 
263 	case DDR_FREQ_1000:
264 		return "DDR_FREQ_1000";
265 
266 	default:
267 		return "Unknown Frequency";
268 	}
269 }
270 
271 /*
272  * Convert device ID to character string
273  */
274 static char *convert_dev_id(u32 dev_id)
275 {
276 	switch (dev_id) {
277 	case 0x6800:
278 		return "A38xx";
279 	case 0x6900:
280 		return "A39XX";
281 	case 0xf400:
282 		return "AC3";
283 	case 0xfc00:
284 		return "BC2";
285 
286 	default:
287 		return "Unknown Device";
288 	}
289 }
290 
291 /*
292  * Convert memory size to character string
293  */
294 static char *convert_mem_size(u32 dev_id)
295 {
296 	switch (dev_id) {
297 	case 0:
298 		return "512 MB";
299 	case 1:
300 		return "1 GB";
301 	case 2:
302 		return "2 GB";
303 	case 3:
304 		return "4 GB";
305 	case 4:
306 		return "8 GB";
307 
308 	default:
309 		return "wrong mem size";
310 	}
311 }
312 
313 int print_device_info(u8 dev_num)
314 {
315 	struct ddr3_device_info info_ptr;
316 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
317 
318 	CHECK_STATUS(ddr3_tip_get_device_info(dev_num, &info_ptr));
319 	printf("=== DDR setup START===\n");
320 	printf("\tDevice ID: %s\n", convert_dev_id(info_ptr.device_id));
321 	printf("\tDDR3  CK delay: %d\n", info_ptr.ck_delay);
322 	print_topology(tm);
323 	printf("=== DDR setup END===\n");
324 
325 	return MV_OK;
326 }
327 
328 void hws_ddr3_tip_sweep_test(int enable)
329 {
330 	if (enable) {
331 		is_validate_window_per_if = 1;
332 		is_validate_window_per_pup = 1;
333 		debug_training = DEBUG_LEVEL_TRACE;
334 	} else {
335 		is_validate_window_per_if = 0;
336 		is_validate_window_per_pup = 0;
337 	}
338 }
339 #endif /* DDR_VIEWER_TOOL */
340 
341 char *ddr3_tip_convert_tune_result(enum hws_result tune_result)
342 {
343 	switch (tune_result) {
344 	case TEST_FAILED:
345 		return "FAILED";
346 	case TEST_SUCCESS:
347 		return "PASS";
348 	case NO_TEST_DONE:
349 		return "NOT COMPLETED";
350 	default:
351 		return "Un-KNOWN";
352 	}
353 }
354 
355 /*
356  * Print log info
357  */
358 int ddr3_tip_print_log(u32 dev_num, u32 mem_addr)
359 {
360 	u32 if_id = 0;
361 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
362 
363 #if defined(DDR_VIEWER_TOOL)
364 	if ((is_validate_window_per_if != 0) ||
365 	    (is_validate_window_per_pup != 0)) {
366 		u32 is_pup_log = 0;
367 		enum hws_ddr_freq freq;
368 
369 		freq = tm->interface_params[first_active_if].memory_freq;
370 
371 		is_pup_log = (is_validate_window_per_pup != 0) ? 1 : 0;
372 		printf("===VALIDATE WINDOW LOG START===\n");
373 		printf("DDR Frequency: %s   ======\n", convert_freq(freq));
374 		/* print sweep windows */
375 		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
376 		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
377 #if defined(EXCLUDE_SWITCH_DEBUG)
378 		if (is_run_leveling_sweep_tests == 1) {
379 			ddr3_tip_run_leveling_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
380 			ddr3_tip_run_leveling_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
381 		}
382 #endif /* EXCLUDE_SWITCH_DEBUG */
383 		ddr3_tip_print_all_pbs_result(dev_num);
384 		ddr3_tip_print_wl_supp_result(dev_num);
385 		printf("===VALIDATE WINDOW LOG END ===\n");
386 		CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));
387 		ddr3_tip_reg_dump(dev_num);
388 	}
389 #endif /* DDR_VIEWER_TOOL */
390 
391 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
392 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
393 
394 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
395 				  ("IF %d Status:\n", if_id));
396 
397 		if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
398 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
399 					  ("\tInit Controller: %s\n",
400 					   ddr3_tip_convert_tune_result
401 					   (training_result[INIT_CONTROLLER]
402 					    [if_id])));
403 		}
404 		if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) {
405 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
406 					  ("\tLow freq Config: %s\n",
407 					   ddr3_tip_convert_tune_result
408 					   (training_result[SET_LOW_FREQ]
409 					    [if_id])));
410 		}
411 		if (mask_tune_func & LOAD_PATTERN_MASK_BIT) {
412 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
413 					  ("\tLoad Pattern: %s\n",
414 					   ddr3_tip_convert_tune_result
415 					   (training_result[LOAD_PATTERN]
416 					    [if_id])));
417 		}
418 		if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) {
419 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
420 					  ("\tMedium freq Config: %s\n",
421 					   ddr3_tip_convert_tune_result
422 					   (training_result[SET_MEDIUM_FREQ]
423 					    [if_id])));
424 		}
425 		if (mask_tune_func & WRITE_LEVELING_MASK_BIT) {
426 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
427 					  ("\tWL: %s\n",
428 					   ddr3_tip_convert_tune_result
429 					   (training_result[WRITE_LEVELING]
430 					    [if_id])));
431 		}
432 		if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) {
433 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
434 					  ("\tLoad Pattern: %s\n",
435 					   ddr3_tip_convert_tune_result
436 					   (training_result[LOAD_PATTERN_2]
437 					    [if_id])));
438 		}
439 		if (mask_tune_func & READ_LEVELING_MASK_BIT) {
440 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
441 					  ("\tRL: %s\n",
442 					   ddr3_tip_convert_tune_result
443 					   (training_result[READ_LEVELING]
444 					    [if_id])));
445 		}
446 		if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
447 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
448 					  ("\tWL Supp: %s\n",
449 					   ddr3_tip_convert_tune_result
450 					   (training_result[WRITE_LEVELING_SUPP]
451 					    [if_id])));
452 		}
453 		if (mask_tune_func & PBS_RX_MASK_BIT) {
454 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
455 					  ("\tPBS RX: %s\n",
456 					   ddr3_tip_convert_tune_result
457 					   (training_result[PBS_RX]
458 					    [if_id])));
459 		}
460 		if (mask_tune_func & PBS_TX_MASK_BIT) {
461 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
462 					  ("\tPBS TX: %s\n",
463 					   ddr3_tip_convert_tune_result
464 					   (training_result[PBS_TX]
465 					    [if_id])));
466 		}
467 		if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
468 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
469 					  ("\tTarget freq Config: %s\n",
470 					   ddr3_tip_convert_tune_result
471 					   (training_result[SET_TARGET_FREQ]
472 					    [if_id])));
473 		}
474 		if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
475 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
476 					  ("\tWL TF: %s\n",
477 					   ddr3_tip_convert_tune_result
478 					   (training_result[WRITE_LEVELING_TF]
479 					    [if_id])));
480 		}
481 		if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
482 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
483 					  ("\tRL TF: %s\n",
484 					   ddr3_tip_convert_tune_result
485 					   (training_result[READ_LEVELING_TF]
486 					    [if_id])));
487 		}
488 		if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
489 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
490 					  ("\tWL TF Supp: %s\n",
491 					   ddr3_tip_convert_tune_result
492 					   (training_result
493 					    [WRITE_LEVELING_SUPP_TF]
494 					    [if_id])));
495 		}
496 		if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
497 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
498 					  ("\tCentr RX: %s\n",
499 					   ddr3_tip_convert_tune_result
500 					   (training_result[CENTRALIZATION_RX]
501 					    [if_id])));
502 		}
503 		if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
504 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
505 					  ("\tVREF_CALIBRATION: %s\n",
506 					   ddr3_tip_convert_tune_result
507 					   (training_result[VREF_CALIBRATION]
508 					    [if_id])));
509 		}
510 		if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
511 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
512 					  ("\tCentr TX: %s\n",
513 					   ddr3_tip_convert_tune_result
514 					   (training_result[CENTRALIZATION_TX]
515 					    [if_id])));
516 		}
517 	}
518 
519 	return MV_OK;
520 }
521 
522 #if !defined(EXCLUDE_DEBUG_PRINTS)
523 /*
524  * Print stability log info
525  */
526 int ddr3_tip_print_stability_log(u32 dev_num)
527 {
528 	u8 if_id = 0, csindex = 0, bus_id = 0, idx = 0;
529 	u32 reg_data;
530 	u32 read_data[MAX_INTERFACE_NUM];
531 	u32 max_cs = ddr3_tip_max_cs_get(dev_num);
532 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
533 
534 	/* Title print */
535 	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
536 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
537 		printf("Title: I/F# , Tj, Calibration_n0, Calibration_p0, Calibration_n1, Calibration_p1, Calibration_n2, Calibration_p2,");
538 		for (csindex = 0; csindex < max_cs; csindex++) {
539 			printf("CS%d , ", csindex);
540 			printf("\n");
541 			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
542 			printf("VWTx, VWRx, WL_tot, WL_ADLL, WL_PH, RL_Tot, RL_ADLL, RL_PH, RL_Smp, Cen_tx, Cen_rx, Vref, DQVref,");
543 			printf("\t\t");
544 			for (idx = 0; idx < 11; idx++)
545 				printf("PBSTx-Pad%d,", idx);
546 			printf("\t\t");
547 			for (idx = 0; idx < 11; idx++)
548 				printf("PBSRx-Pad%d,", idx);
549 		}
550 	}
551 	printf("\n");
552 
553 	/* Data print */
554 	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
555 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
556 
557 		printf("Data: %d,%d,", if_id,
558 		       (config_func_info[dev_num].tip_get_temperature != NULL)
559 		       ? (config_func_info[dev_num].
560 			  tip_get_temperature(dev_num)) : (0));
561 
562 		CHECK_STATUS(ddr3_tip_if_read
563 			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x14c8,
564 			      read_data, MASK_ALL_BITS));
565 		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
566 		       ((read_data[if_id] & 0xfc00) >> 10));
567 		CHECK_STATUS(ddr3_tip_if_read
568 			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x17c8,
569 			      read_data, MASK_ALL_BITS));
570 		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
571 		       ((read_data[if_id] & 0xfc00) >> 10));
572 		CHECK_STATUS(ddr3_tip_if_read
573 			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1dc8,
574 			      read_data, MASK_ALL_BITS));
575 		printf("%d,%d,", ((read_data[if_id] & 0x3f0000) >> 16),
576 		       ((read_data[if_id] & 0xfc00000) >> 22));
577 
578 		for (csindex = 0; csindex < max_cs; csindex++) {
579 			printf("CS%d , ", csindex);
580 			for (bus_id = 0; bus_id < MAX_BUS_NUM; bus_id++) {
581 				printf("\n");
582 				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
583 				ddr3_tip_bus_read(dev_num, if_id,
584 						  ACCESS_TYPE_UNICAST,
585 						  bus_id, DDR_PHY_DATA,
586 						  RESULT_PHY_REG +
587 						  csindex, &reg_data);
588 				printf("%d,%d,", (reg_data & 0x1f),
589 				       ((reg_data & 0x3e0) >> 5));
590 				/* WL */
591 				ddr3_tip_bus_read(dev_num, if_id,
592 						  ACCESS_TYPE_UNICAST,
593 						  bus_id, DDR_PHY_DATA,
594 						  WL_PHY_REG(csindex),
595 						  &reg_data);
596 				printf("%d,%d,%d,",
597 				       (reg_data & 0x1f) +
598 				       ((reg_data & 0x1c0) >> 6) * 32,
599 				       (reg_data & 0x1f),
600 				       (reg_data & 0x1c0) >> 6);
601 				/* RL */
602 				CHECK_STATUS(ddr3_tip_if_read
603 					     (dev_num, ACCESS_TYPE_UNICAST,
604 					      if_id,
605 					      RD_DATA_SMPL_DLYS_REG,
606 					      read_data, MASK_ALL_BITS));
607 				read_data[if_id] =
608 					(read_data[if_id] &
609 					 (0x1f << (8 * csindex))) >>
610 					(8 * csindex);
611 				ddr3_tip_bus_read(dev_num, if_id,
612 						  ACCESS_TYPE_UNICAST, bus_id,
613 						  DDR_PHY_DATA,
614 						  RL_PHY_REG(csindex),
615 						  &reg_data);
616 				printf("%d,%d,%d,%d,",
617 				       (reg_data & 0x1f) +
618 				       ((reg_data & 0x1c0) >> 6) * 32 +
619 				       read_data[if_id] * 64,
620 				       (reg_data & 0x1f),
621 				       ((reg_data & 0x1c0) >> 6),
622 				       read_data[if_id]);
623 				/* Centralization */
624 				ddr3_tip_bus_read(dev_num, if_id,
625 						  ACCESS_TYPE_UNICAST, bus_id,
626 						  DDR_PHY_DATA,
627 						  CTX_PHY_REG(csindex),
628 						  &reg_data);
629 				printf("%d,", (reg_data & 0x3f));
630 				ddr3_tip_bus_read(dev_num, if_id,
631 						  ACCESS_TYPE_UNICAST, bus_id,
632 						  DDR_PHY_DATA,
633 						  CRX_PHY_REG(csindex),
634 						   &reg_data);
635 				printf("%d,", (reg_data & 0x1f));
636 				/* Vref */
637 				ddr3_tip_bus_read(dev_num, if_id,
638 						  ACCESS_TYPE_UNICAST, bus_id,
639 						  DDR_PHY_DATA,
640 						  PAD_CFG_PHY_REG,
641 						  &reg_data);
642 				printf("%d,", (reg_data & 0x7));
643 				/* DQVref */
644 				/* Need to add the Read Function from device */
645 				printf("%d,", 0);
646 				printf("\t\t");
647 				for (idx = 0; idx < 11; idx++) {
648 					ddr3_tip_bus_read(dev_num, if_id,
649 							  ACCESS_TYPE_UNICAST,
650 							  bus_id, DDR_PHY_DATA,
651 							  0x10 +
652 							  16 * csindex +
653 							  idx, &reg_data);
654 					printf("%d,", (reg_data & 0x3f));
655 				}
656 				printf("\t\t");
657 				for (idx = 0; idx < 11; idx++) {
658 					ddr3_tip_bus_read(dev_num, if_id,
659 							  ACCESS_TYPE_UNICAST,
660 							  bus_id, DDR_PHY_DATA,
661 							  0x50 +
662 							  16 * csindex +
663 							  idx, &reg_data);
664 					printf("%d,", (reg_data & 0x3f));
665 				}
666 			}
667 		}
668 	}
669 	printf("\n");
670 
671 	return MV_OK;
672 }
673 #endif /* EXCLUDE_DEBUG_PRINTS */
674 
675 /*
676  * Register XSB information
677  */
678 int ddr3_tip_register_xsb_info(u32 dev_num, struct hws_xsb_info *xsb_info_table)
679 {
680 	memcpy(&xsb_info[dev_num], xsb_info_table, sizeof(struct hws_xsb_info));
681 	return MV_OK;
682 }
683 
684 /*
685  * Read ADLL Value
686  */
687 int ddr3_tip_read_adll_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
688 			     u32 reg_addr, u32 mask)
689 {
690 	u32 data_value;
691 	u32 if_id = 0, bus_id = 0;
692 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
693 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
694 
695 	/*
696 	 * multi CS support - reg_addr is calculated in the calling function
697 	 * with CS offset
698 	 */
699 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
700 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
701 		for (bus_id = 0; bus_id < octets_per_if_num;
702 		     bus_id++) {
703 			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
704 			CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
705 						       ACCESS_TYPE_UNICAST,
706 						       bus_id,
707 						       DDR_PHY_DATA, reg_addr,
708 						       &data_value));
709 			pup_values[if_id *
710 				   octets_per_if_num + bus_id] =
711 				data_value & mask;
712 		}
713 	}
714 
715 	return 0;
716 }
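
/*
 * Usage sketch, assuming device 0 and chip-select 0: read the TX
 * centralization ADLL of every active octet into a flat
 * [interface * octets + octet] array, as the sweep code below does.
 *
 *	u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM];
 *
 *	ddr3_tip_read_adll_value(0, adll, CTX_PHY_REG(0), MASK_ALL_BITS);
 */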
717 
718 /*
719  * Write ADLL Value
720  */
721 int ddr3_tip_write_adll_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
722 			      u32 reg_addr)
723 {
724 	u32 if_id = 0, bus_id = 0;
725 	u32 data;
726 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
727 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
728 
729 	/*
730 	 * multi CS support - reg_addr is calculated in the calling function
731 	 * with CS offset
732 	 */
733 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
734 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
735 		for (bus_id = 0; bus_id < octets_per_if_num;
736 		     bus_id++) {
737 			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
738 			data = pup_values[if_id *
739 					  octets_per_if_num +
740 					  bus_id];
741 			CHECK_STATUS(ddr3_tip_bus_write(dev_num,
742 							ACCESS_TYPE_UNICAST,
743 							if_id,
744 							ACCESS_TYPE_UNICAST,
745 							bus_id, DDR_PHY_DATA,
746 							reg_addr, data));
747 		}
748 	}
749 
750 	return 0;
751 }
752 
753 /**
754  * Read Phase Value
755  */
756 int read_phase_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
757 		     int reg_addr, u32 mask)
758 {
759 	u32  data_value;
760 	u32 if_id = 0, bus_id = 0;
761 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
762 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
763 
764 	/* multi CS support - reg_addr is calculated in the calling function with CS offset */
765 	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
766 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
767 		for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) {
768 			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
769 			CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
770 						       ACCESS_TYPE_UNICAST,
771 						       bus_id,
772 						       DDR_PHY_DATA, reg_addr,
773 						       &data_value));
774 			pup_values[if_id * octets_per_if_num + bus_id] = data_value & mask;
775 		}
776 	}
777 
778 	return 0;
779 }
780 
781 /**
782  * Write Leveling Value
783  */
784 int write_leveling_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
785 			 u32 pup_ph_values[MAX_INTERFACE_NUM * MAX_BUS_NUM], int reg_addr)
786 {
787 	u32 if_id = 0, bus_id = 0;
788 	u32 data;
789 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
790 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
791 
792 	/* multi CS support - reg_addr is calculated in the calling function with CS offset */
793 	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
794 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
795 		for (bus_id = 0 ; bus_id < octets_per_if_num ; bus_id++) {
796 			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
797 			data = pup_values[if_id * octets_per_if_num + bus_id] +
798 			       pup_ph_values[if_id * octets_per_if_num + bus_id];
799 			CHECK_STATUS(ddr3_tip_bus_write(dev_num,
800 							ACCESS_TYPE_UNICAST,
801 							if_id,
802 							ACCESS_TYPE_UNICAST,
803 							bus_id,
804 							DDR_PHY_DATA,
805 							reg_addr,
806 							data));
807 		}
808 	}
809 
810 	return 0;
811 }
812 
813 #if !defined(EXCLUDE_SWITCH_DEBUG)
814 struct hws_tip_config_func_db config_func_info[MAX_DEVICE_NUM];
815 u32 start_xsb_offset = 0;
816 u8 is_rl_old = 0;
817 u8 is_freq_old = 0;
818 u8 is_dfs_disabled = 0;
819 u32 default_centrlization_value = 0x12;
820 u32 activate_select_before_run_alg = 1, activate_deselect_after_run_alg = 1,
821 	rl_test = 0, reset_read_fifo = 0;
822 int debug_acc = 0;
823 u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
824 u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
825 
826 u32 xsb_test_table[][8] = {
827 	{0x00000000, 0x11111111, 0x22222222, 0x33333333, 0x44444444, 0x55555555,
828 	 0x66666666, 0x77777777},
829 	{0x88888888, 0x99999999, 0xaaaaaaaa, 0xbbbbbbbb, 0xcccccccc, 0xdddddddd,
830 	 0xeeeeeeee, 0xffffffff},
831 	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
832 	 0x00000000, 0xffffffff},
833 	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
834 	 0x00000000, 0xffffffff},
835 	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
836 	 0x00000000, 0xffffffff},
837 	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
838 	 0x00000000, 0xffffffff},
839 	{0x00000000, 0x00000000, 0xffffffff, 0xffffffff, 0x00000000, 0x00000000,
840 	 0xffffffff, 0xffffffff},
841 	{0x00000000, 0x00000000, 0x00000000, 0xffffffff, 0x00000000, 0x00000000,
842 	 0x00000000, 0x00000000},
843 	{0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, 0xffffffff,
844 	 0xffffffff, 0xffffffff}
845 };
846 
847 static int ddr3_tip_access_atr(u32 dev_num, u32 flag_id, u32 value, u32 **ptr);
848 
849 int ddr3_tip_print_adll(void)
850 {
851 	u32 bus_cnt = 0, if_id, data_p1, data_p2, ui_data3, dev_num = 0;
852 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
853 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
854 
855 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
856 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
857 		for (bus_cnt = 0; bus_cnt < octets_per_if_num;
858 		     bus_cnt++) {
859 			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
860 			CHECK_STATUS(ddr3_tip_bus_read
861 				     (dev_num, if_id,
862 				      ACCESS_TYPE_UNICAST, bus_cnt,
863 				      DDR_PHY_DATA, 0x1, &data_p1));
864 			CHECK_STATUS(ddr3_tip_bus_read
865 				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
866 				      bus_cnt, DDR_PHY_DATA, 0x2, &data_p2));
867 			CHECK_STATUS(ddr3_tip_bus_read
868 				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
869 				      bus_cnt, DDR_PHY_DATA, 0x3, &ui_data3));
870 			DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
871 					  (" IF %d bus_cnt %d  phy_reg_1_data 0x%x phy_reg_2_data 0x%x phy_reg_3_data 0x%x\n",
872 					   if_id, bus_cnt, data_p1, data_p2,
873 					   ui_data3));
874 			}
875 	}
876 
877 	return MV_OK;
878 }
879 
880 /*
881  * Set attribute value
882  */
883 int ddr3_tip_set_atr(u32 dev_num, u32 flag_id, u32 value)
884 {
885 	int ret;
886 	u32 *ptr_flag = NULL;
887 
888 	ret = ddr3_tip_access_atr(dev_num, flag_id, value, &ptr_flag);
889 	if (ptr_flag != NULL) {
890 		printf("ddr3_tip_set_atr Flag ID 0x%x value is set to 0x%x (was 0x%x)\n",
891 		       flag_id, value, *ptr_flag);
892 		*ptr_flag = value;
893 	} else {
894 		printf("ddr3_tip_set_atr Flag ID 0x%x value is set to 0x%x\n",
895 		       flag_id, value);
896 	}
897 
898 	return ret;
899 }
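
/*
 * Usage sketch: flag IDs map to the switch in ddr3_tip_access_atr() below;
 * for example, 0x102 selects debug_training, so this sets the main training
 * log to trace verbosity on device 0.
 *
 *	ddr3_tip_set_atr(0, 0x102, DEBUG_LEVEL_TRACE);
 */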
900 
901 /*
902  * Access attribute
903  */
904 static int ddr3_tip_access_atr(u32 dev_num, u32 flag_id, u32 value, u32 **ptr)
905 {
906 	u32 tmp_val = 0, if_id = 0, pup_id = 0;
907 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
908 
909 	*ptr = NULL;
910 
911 	switch (flag_id) {
912 	case 0:
913 		*ptr = (u32 *)&(tm->if_act_mask);
914 		break;
915 
916 	case 0x1:
917 		*ptr = (u32 *)&mask_tune_func;
918 		break;
919 
920 	case 0x2:
921 		low_freq = (enum hws_ddr_freq)value;
922 		break;
923 
924 	case 0x3:
925 		medium_freq = (enum hws_ddr_freq)value;
926 		break;
927 
928 	case 0x4:
929 		*ptr = (u32 *)&generic_init_controller;
930 		break;
931 
932 	case 0x8:
933 		*ptr = (u32 *)&start_xsb_offset;
934 		break;
935 
936 	case 0x20:
937 		*ptr = (u32 *)&is_rl_old;
938 		break;
939 
940 	case 0x21:
941 		*ptr = (u32 *)&is_freq_old;
942 		break;
943 
944 	case 0x23:
945 		*ptr = (u32 *)&is_dfs_disabled;
946 		break;
947 
948 	case 0x24:
949 		*ptr = (u32 *)&is_pll_before_init;
950 		break;
951 
952 	case 0x25:
953 		*ptr = (u32 *)&is_adll_calib_before_init;
954 		break;
955 	case 0x28:
956 		*ptr = (u32 *)&is_tune_result;
957 		break;
958 
959 	case 0x29:
960 		*ptr = (u32 *)&is_validate_window_per_if;
961 		break;
962 
963 	case 0x2a:
964 		*ptr = (u32 *)&is_validate_window_per_pup;
965 		break;
966 
967 	case 0x30:
968 		*ptr = (u32 *)&sweep_cnt;
969 		break;
970 
971 	case 0x31:
972 		*ptr = (u32 *)&is_bist_reset_bit;
973 		break;
974 
975 	case 0x32:
976 		*ptr = (u32 *)&is_dfs_in_init;
977 		break;
978 
979 	case 0x33:
980 		*ptr = (u32 *)&g_zpodt_data;
981 		break;
982 
983 	case 0x34:
984 		*ptr = (u32 *)&g_znodt_data;
985 		break;
986 
987 	case 0x35:
988 		break;
989 
990 	case 0x36:
991 		*ptr = (u32 *)&(freq_val[DDR_FREQ_LOW_FREQ]);
992 		break;
993 
994 	case 0x37:
995 		*ptr = (u32 *)&start_pattern;
996 		break;
997 
998 	case 0x38:
999 		*ptr = (u32 *)&end_pattern;
1000 		break;
1001 
1002 	case 0x39:
1003 		*ptr = (u32 *)&phy_reg0_val;
1004 		break;
1005 
1006 	case 0x4a:
1007 		*ptr = (u32 *)&phy_reg1_val;
1008 		break;
1009 
1010 	case 0x4b:
1011 		*ptr = (u32 *)&phy_reg2_val;
1012 		break;
1013 
1014 	case 0x4c:
1015 		*ptr = (u32 *)&phy_reg3_val;
1016 		break;
1017 
1018 	case 0x4e:
1019 		sweep_pattern = (enum hws_pattern)value;
1020 		break;
1021 
1022 	case 0x51:
1023 		*ptr = (u32 *)&g_znri_data;
1024 		break;
1025 
1026 	case 0x52:
1027 		*ptr = (u32 *)&g_zpri_data;
1028 		break;
1029 
1030 	case 0x53:
1031 		*ptr = (u32 *)&finger_test;
1032 		break;
1033 
1034 	case 0x54:
1035 		*ptr = (u32 *)&n_finger_start;
1036 		break;
1037 
1038 	case 0x55:
1039 		*ptr = (u32 *)&n_finger_end;
1040 		break;
1041 
1042 	case 0x56:
1043 		*ptr = (u32 *)&p_finger_start;
1044 		break;
1045 
1046 	case 0x57:
1047 		*ptr = (u32 *)&p_finger_end;
1048 		break;
1049 
1050 	case 0x58:
1051 		*ptr = (u32 *)&p_finger_step;
1052 		break;
1053 
1054 	case 0x59:
1055 		*ptr = (u32 *)&n_finger_step;
1056 		break;
1057 
1058 	case 0x5a:
1059 		*ptr = (u32 *)&g_znri_ctrl;
1060 		break;
1061 
1062 	case 0x5b:
1063 		*ptr = (u32 *)&g_zpri_ctrl;
1064 		break;
1065 
1066 	case 0x5c:
1067 		*ptr = (u32 *)&is_reg_dump;
1068 		break;
1069 
1070 	case 0x5d:
1071 		*ptr = (u32 *)&vref_init_val;
1072 		break;
1073 
1074 	case 0x5e:
1075 		*ptr = (u32 *)&mode_2t;
1076 		break;
1077 
1078 	case 0x5f:
1079 		*ptr = (u32 *)&xsb_validate_type;
1080 		break;
1081 
1082 	case 0x60:
1083 		*ptr = (u32 *)&xsb_validation_base_address;
1084 		break;
1085 
1086 	case 0x67:
1087 		*ptr = (u32 *)&activate_select_before_run_alg;
1088 		break;
1089 
1090 	case 0x68:
1091 		*ptr = (u32 *)&activate_deselect_after_run_alg;
1092 		break;
1093 
1094 	case 0x69:
1095 		*ptr = (u32 *)&odt_additional;
1096 		break;
1097 
1098 	case 0x70:
1099 		*ptr = (u32 *)&debug_mode;
1100 		break;
1101 
1102 	case 0x71:
1103 		pbs_pattern = (enum hws_pattern)value;
1104 		break;
1105 
1106 	case 0x72:
1107 		*ptr = (u32 *)&delay_enable;
1108 		break;
1109 
1110 	case 0x73:
1111 		*ptr = (u32 *)&ck_delay;
1112 		break;
1113 
1114 	case 0x75:
1115 		*ptr = (u32 *)&ca_delay;
1116 		break;
1117 
1118 	case 0x100:
1119 		*ptr = (u32 *)&debug_dunit;
1120 		break;
1121 
1122 	case 0x101:
1123 		debug_acc = (int)value;
1124 		break;
1125 
1126 	case 0x102:
1127 		debug_training = (u8)value;
1128 		break;
1129 
1130 	case 0x103:
1131 		debug_training_bist = (u8)value;
1132 		break;
1133 
1134 	case 0x104:
1135 		debug_centralization = (u8)value;
1136 		break;
1137 
1138 	case 0x105:
1139 		debug_training_ip = (u8)value;
1140 		break;
1141 
1142 	case 0x106:
1143 		debug_leveling = (u8)value;
1144 		break;
1145 
1146 	case 0x107:
1147 		debug_pbs = (u8)value;
1148 		break;
1149 
1150 	case 0x108:
1151 		debug_training_static = (u8)value;
1152 		break;
1153 
1154 	case 0x109:
1155 		debug_training_access = (u8)value;
1156 		break;
1157 
1158 
1159 	case 0x112:
1160 		*ptr = &start_pattern;
1161 		break;
1162 
1163 	case 0x113:
1164 		*ptr = &end_pattern;
1165 		break;
1166 
1167 	default:
1168 		if ((flag_id >= 0x200) && (flag_id < 0x210)) {
1169 			if_id = flag_id - 0x200;
1170 			*ptr = (u32 *)&(tm->interface_params
1171 					[if_id].memory_freq);
1172 		} else if ((flag_id >= 0x210) && (flag_id < 0x220)) {
1173 			if_id = flag_id - 0x210;
1174 			*ptr = (u32 *)&(tm->interface_params
1175 					[if_id].speed_bin_index);
1176 		} else if ((flag_id >= 0x220) && (flag_id < 0x230)) {
1177 			if_id = flag_id - 0x220;
1178 			*ptr = (u32 *)&(tm->interface_params
1179 					[if_id].bus_width);
1180 		} else if ((flag_id >= 0x230) && (flag_id < 0x240)) {
1181 			if_id = flag_id - 0x230;
1182 			*ptr = (u32 *)&(tm->interface_params
1183 					[if_id].memory_size);
1184 		} else if ((flag_id >= 0x240) && (flag_id < 0x250)) {
1185 			if_id = flag_id - 0x240;
1186 			*ptr = (u32 *)&(tm->interface_params
1187 					[if_id].cas_l);
1188 		} else if ((flag_id >= 0x250) && (flag_id < 0x260)) {
1189 			if_id = flag_id - 0x250;
1190 			*ptr = (u32 *)&(tm->interface_params
1191 					[if_id].cas_wl);
1192 		} else if ((flag_id >= 0x270) && (flag_id < 0x2cf)) {
1193 			if_id = (flag_id - 0x270) / MAX_BUS_NUM;
1194 			pup_id = (flag_id - 0x270) % MAX_BUS_NUM;
1195 			*ptr = (u32 *)&(tm->interface_params[if_id].
1196 					as_bus_params[pup_id].is_ck_swap);
1197 		} else if ((flag_id >= 0x2d0) && (flag_id < 0x32f)) {
1198 			if_id = (flag_id - 0x2d0) / MAX_BUS_NUM;
1199 			pup_id = (flag_id - 0x2d0) % MAX_BUS_NUM;
1200 			*ptr = (u32 *)&(tm->interface_params[if_id].
1201 					as_bus_params[pup_id].is_dqs_swap);
1202 		} else if ((flag_id >= 0x330) && (flag_id < 0x38f)) {
1203 			if_id = (flag_id - 0x330) / MAX_BUS_NUM;
1204 			pup_id = (flag_id - 0x330) % MAX_BUS_NUM;
1205 			*ptr = (u32 *)&(tm->interface_params[if_id].
1206 					as_bus_params[pup_id].cs_bitmask);
1207 		} else if ((flag_id >= 0x390) && (flag_id < 0x3ef)) {
1208 			if_id = (flag_id - 0x390) / MAX_BUS_NUM;
1209 			pup_id = (flag_id - 0x390) % MAX_BUS_NUM;
1210 			*ptr = (u32 *)&(tm->interface_params
1211 					[if_id].as_bus_params
1212 					[pup_id].mirror_enable_bitmask);
1213 		} else if ((flag_id >= 0x500) && (flag_id <= 0x50f)) {
1214 			tmp_val = flag_id - 0x320;
1215 			*ptr = (u32 *)&(clamp_tbl[tmp_val]);
1216 		} else {
1217 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1218 					  ("flag_id out of boundary %d\n",
1219 					   flag_id));
1220 			return MV_BAD_PARAM;
1221 		}
1222 	}
1223 
1224 	return MV_OK;
1225 }
1226 
1227 #endif /* EXCLUDE_SWITCH_DEBUG */
1228 
1229 #if defined(DDR_VIEWER_TOOL)
1230 /*
1231  * Print ADLL
1232  */
1233 int print_adll(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
1234 {
1235 	u32 i, j;
1236 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1237 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1238 
1239 	for (j = 0; j < octets_per_if_num; j++) {
1240 		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, j);
1241 		for (i = 0; i < MAX_INTERFACE_NUM; i++)
1242 			printf("%d ,", adll[i * octets_per_if_num + j]);
1243 	}
1244 	printf("\n");
1245 
1246 	return MV_OK;
1247 }
1248 
1249 int print_ph(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
1250 {
1251 	u32 i, j;
1252 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1253 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1254 
1255 	for (j = 0; j < octets_per_if_num; j++) {
1256 		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, j);
1257 		for (i = 0; i < MAX_INTERFACE_NUM; i++)
1258 			printf("%d ,", adll[i * octets_per_if_num + j] >> 6);
1259 	}
1260 	printf("\n");
1261 
1262 	return MV_OK;
1263 }
1264 #endif /* DDR_VIEWER_TOOL */
1265 
1266 #if !defined(EXCLUDE_SWITCH_DEBUG)
1267 /* byte_index - test only byte 0, 1, 2, or 3; 0xff - test all bytes */
1268 static u32 ddr3_tip_compare(u32 if_id, u32 *p_src, u32 *p_dst,
1269 			    u32 byte_index)
1270 {
1271 	u32 burst_cnt = 0, addr_offset, i_id;
1272 	int b_is_fail = 0;
1273 
1274 	addr_offset =
1275 		(byte_index ==
1276 		 0xff) ? (u32) 0xffffffff : (u32) (0xff << (byte_index * 8));
1277 	for (burst_cnt = 0; burst_cnt < EXT_ACCESS_BURST_LENGTH; burst_cnt++) {
1278 		if ((p_src[burst_cnt] & addr_offset) !=
1279 		    (p_dst[if_id] & addr_offset))
1280 			b_is_fail = 1;
1281 	}
1282 
1283 	if (b_is_fail == 1) {
1284 		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1285 				  ("IF %d exp: ", if_id));
1286 		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
1287 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1288 					  ("0x%8x ", p_src[i_id]));
1289 		}
1290 		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1291 				  ("\n_i_f %d rcv: ", if_id));
1292 		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
1293 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1294 					  ("(0x%8x ", p_dst[i_id]));
1295 		}
1296 		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, ("\n "));
1297 	}
1298 
1299 	return b_is_fail;
1300 }
1301 #endif /* EXCLUDE_SWITCH_DEBUG */
1302 
1303 #if defined(DDR_VIEWER_TOOL)
1304 /*
1305  * Sweep validation
1306  */
1307 int ddr3_tip_run_sweep_test(int dev_num, u32 repeat_num, u32 direction,
1308 			    u32 mode)
1309 {
1310 	u32 pup = 0, start_pup = 0, end_pup = 0;
1311 	u32 adll = 0, rep = 0, pattern_idx = 0;
1312 	u32 res[MAX_INTERFACE_NUM] = { 0 };
1313 	int if_id = 0;
1314 	u32 adll_value = 0;
1315 	u32 reg;
1316 	enum hws_access_type pup_access;
1317 	u32 cs;
1318 	u32 max_cs = ddr3_tip_max_cs_get(dev_num);
1319 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1320 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1321 
1322 	repeat_num = 2;
1323 
1324 	if (mode == 1) {
1325 		/* per pup */
1326 		start_pup = 0;
1327 		end_pup = octets_per_if_num - 1;
1328 		pup_access = ACCESS_TYPE_UNICAST;
1329 	} else {
1330 		start_pup = 0;
1331 		end_pup = 0;
1332 		pup_access = ACCESS_TYPE_MULTICAST;
1333 	}
1334 
1335 	for (cs = 0; cs < max_cs; cs++) {
1336 		reg = (direction == 0) ? CTX_PHY_REG(cs) : CRX_PHY_REG(cs);
1337 		for (adll = 0; adll < ADLL_LENGTH; adll++) {
1338 			for (if_id = 0;
1339 			     if_id <= MAX_INTERFACE_NUM - 1;
1340 			     if_id++) {
1341 				VALIDATE_IF_ACTIVE
1342 					(tm->if_act_mask,
1343 					 if_id);
1344 				for (pup = start_pup; pup <= end_pup; pup++) {
1345 					ctrl_sweepres[adll][if_id][pup] =
1346 						0;
1347 				}
1348 			}
1349 		}
1350 
1351 		for (adll = 0; adll < (MAX_INTERFACE_NUM * MAX_BUS_NUM); adll++)
1352 			ctrl_adll[adll] = 0;
1353 		/* Save DQS value (after algorithm run) */
1354 		ddr3_tip_read_adll_value(dev_num, ctrl_adll,
1355 					 reg, MASK_ALL_BITS);
1356 
1357 		/*
1358 		 * Sweep ADLL  from 0:31 on all I/F on all Pup and perform
1359 		 * BIST on each stage.
1360 		 */
1361 		for (pup = start_pup; pup <= end_pup; pup++) {
1362 			for (adll = 0; adll < ADLL_LENGTH; adll++) {
1363 				for (rep = 0; rep < repeat_num; rep++) {
1364 					for (pattern_idx = PATTERN_KILLER_DQ0;
1365 					     pattern_idx < PATTERN_LAST;
1366 					     pattern_idx++) {
1367 						adll_value =
1368 							(direction == 0) ? (adll * 2) : adll;
1369 						CHECK_STATUS(ddr3_tip_bus_write
1370 							     (dev_num, ACCESS_TYPE_MULTICAST, 0,
1371 							      pup_access, pup, DDR_PHY_DATA,
1372 							      reg, adll_value));
1373 						hws_ddr3_run_bist(dev_num, sweep_pattern, res,
1374 								  cs);
1375 						/* ddr3_tip_reset_fifo_ptr(dev_num); */
1376 						for (if_id = 0;
1377 						     if_id < MAX_INTERFACE_NUM;
1378 						     if_id++) {
1379 							VALIDATE_IF_ACTIVE
1380 								(tm->if_act_mask,
1381 								 if_id);
1382 							ctrl_sweepres[adll][if_id][pup]
1383 								+= res[if_id];
1384 							if (mode == 1) {
1385 								CHECK_STATUS
1386 									(ddr3_tip_bus_write
1387 									 (dev_num,
1388 									  ACCESS_TYPE_UNICAST,
1389 									  if_id,
1390 									  ACCESS_TYPE_UNICAST,
1391 									  pup,
1392 									  DDR_PHY_DATA,
1393 									  reg,
1394 									  ctrl_adll[if_id *
1395 										    cs *
1396 										    octets_per_if_num
1397 										    + pup]));
1398 							}
1399 						}
1400 					}
1401 				}
1402 			}
1403 		}
1404 		printf("Final, CS %d,%s, Sweep, Result, Adll,", cs,
1405 		       ((direction == 0) ? "TX" : "RX"));
1406 		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1407 			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1408 			if (mode == 1) {
1409 				for (pup = start_pup; pup <= end_pup; pup++) {
1410 					VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
1411 					printf("I/F%d-PHY%d , ", if_id, pup);
1412 				}
1413 			} else {
1414 				printf("I/F%d , ", if_id);
1415 			}
1416 		}
1417 		printf("\n");
1418 
1419 		for (adll = 0; adll < ADLL_LENGTH; adll++) {
1420 			adll_value = (direction == 0) ? (adll * 2) : adll;
1421 			printf("Final,%s, Sweep, Result, %d ,",
1422 			       ((direction == 0) ? "TX" : "RX"), adll_value);
1423 
1424 			for (if_id = 0;
1425 			     if_id <= MAX_INTERFACE_NUM - 1;
1426 			     if_id++) {
1427 				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1428 				for (pup = start_pup; pup <= end_pup; pup++) {
1429 					printf("%8d , ",
1430 					       ctrl_sweepres[adll][if_id]
1431 					       [pup]);
1432 				}
1433 			}
1434 			printf("\n");
1435 		}
1436 
1437 		/*
1438 		 * Write back to the PHY the Rx DQS value we stored at
1439 		 * the beginning.
1440 		 */
1441 		ddr3_tip_write_adll_value(dev_num, ctrl_adll, reg);
1442 		/* print adll results */
1443 		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, MASK_ALL_BITS);
1444 		printf("%s, DQS, ADLL,,,", (direction == 0) ? "Tx" : "Rx");
1445 		print_adll(dev_num, ctrl_adll);
1446 	}
1447 	ddr3_tip_reset_fifo_ptr(dev_num);
1448 
1449 	return 0;
1450 }
1451 
1452 #if defined(EXCLUDE_SWITCH_DEBUG)
1453 int ddr3_tip_run_leveling_sweep_test(int dev_num, u32 repeat_num,
1454 				     u32 direction, u32 mode)
1455 {
1456 	u32 pup = 0, start_pup = 0, end_pup = 0, start_adll = 0;
1457 	u32 adll = 0, rep = 0, pattern_idx = 0;
1458 	u32 read_data[MAX_INTERFACE_NUM];
1459 	u32 res[MAX_INTERFACE_NUM] = { 0 };
1460 	int if_id = 0, gap = 0;
1461 	u32 adll_value = 0;
1462 	u32 reg;
1463 	enum hws_access_type pup_access;
1464 	u32 cs;
1465 	u32 max_cs = ddr3_tip_max_cs_get(dev_num);
1466 	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1467 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1468 
1469 	if (mode == 1) { /* per pup */
1470 		start_pup = 0;
1471 		end_pup = octets_per_if_num - 1;
1472 		pup_access = ACCESS_TYPE_UNICAST;
1473 	} else {
1474 		start_pup = 0;
1475 		end_pup = 0;
1476 		pup_access = ACCESS_TYPE_MULTICAST;
1477 	}
1478 
1479 	for (cs = 0; cs < max_cs; cs++) {
1480 		reg = (direction == 0) ? WL_PHY_REG(cs) : RL_PHY_REG(cs);
1481 		for (adll = 0; adll < ADLL_LENGTH; adll++) {
1482 			for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
1483 				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1484 				for (pup = start_pup; pup <= end_pup; pup++)
1485 					ctrl_sweepres[adll][if_id][pup] = 0;
1486 			}
1487 		}
1488 
1489 		for (adll = 0; adll < MAX_INTERFACE_NUM * MAX_BUS_NUM; adll++) {
1490 			ctrl_adll[adll] = 0;
1491 			ctrl_level_phase[adll] = 0;
1492 			ctrl_adll1[adll] = 0;
1493 		}
1494 
1495 		/* save leveling value after running algorithm */
1496 		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, 0x1f);
1497 		read_phase_value(dev_num, ctrl_level_phase, reg, 0x7 << 6);
1498 
1499 		if (direction == 0)
1500 			ddr3_tip_read_adll_value(dev_num, ctrl_adll1,
1501 						 CTX_PHY_REG(cs), MASK_ALL_BITS);
1502 
1503 		/* Sweep ADLL from 0 to 31 on all interfaces, all pups,
1504 		 * and perform BIST on each stage
1505 		 */
1506 		for (pup = start_pup; pup <= end_pup; pup++) {
1507 			for (adll = 0; adll < ADLL_LENGTH; adll++) {
1508 				for (rep = 0; rep < repeat_num; rep++) {
1509 					adll_value = (direction == 0) ? (adll * 2) : (adll * 3);
1510 					for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1511 						start_adll = ctrl_adll[if_id * cs * octets_per_if_num + pup] +
1512 							     (ctrl_level_phase[if_id * cs *
1513 									     octets_per_if_num +
1514 									     pup] >> 6) * 32;
1515 
1516 						if (direction == 0)
1517 							start_adll = (start_adll > 32) ? (start_adll - 32) : 0;
1518 						else
1519 							start_adll = (start_adll > 48) ? (start_adll - 48) : 0;
1520 
1521 						adll_value += start_adll;
1522 
1523 						gap = ctrl_adll1[if_id * cs * octets_per_if_num + pup] -
1524 						      ctrl_adll[if_id * cs * octets_per_if_num + pup];
1525 						gap = (((adll_value % 32) + gap) % 64);
1526 
1527 						adll_value = ((adll_value % 32) +
1528 							       (((adll_value - (adll_value % 32)) / 32) << 6));
1529 
1530 						CHECK_STATUS(ddr3_tip_bus_write(dev_num,
1531 										ACCESS_TYPE_UNICAST,
1532 										if_id,
1533 										pup_access,
1534 										pup,
1535 										DDR_PHY_DATA,
1536 										reg,
1537 										adll_value));
1538 						if (direction == 0)
1539 							CHECK_STATUS(ddr3_tip_bus_write(dev_num,
1540 											ACCESS_TYPE_UNICAST,
1541 											if_id,
1542 											pup_access,
1543 											pup,
1544 											DDR_PHY_DATA,
1545 											CTX_PHY_REG(cs),
1546 											gap));
1547 					}
1548 
1549 					for (pattern_idx = PATTERN_KILLER_DQ0;
1550 					     pattern_idx < PATTERN_LAST;
1551 					     pattern_idx++) {
1552 						hws_ddr3_run_bist(dev_num, sweep_pattern, res, cs);
1553 						ddr3_tip_reset_fifo_ptr(dev_num);
1554 						for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1555 							VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1556 							if (pup != 4) { /* TODO: remove literal */
1557 								ctrl_sweepres[adll][if_id][pup] += res[if_id];
1558 							} else {
1559 								CHECK_STATUS(ddr3_tip_if_read(dev_num,
1560 											      ACCESS_TYPE_UNICAST,
1561 											      if_id,
1562 											      0x1458,
1563 											      read_data,
1564 											      MASK_ALL_BITS));
1565 								ctrl_sweepres[adll][if_id][pup] += read_data[if_id];
1566 								CHECK_STATUS(ddr3_tip_if_write(dev_num,
1567 											       ACCESS_TYPE_UNICAST,
1568 											       if_id,
1569 											       0x1458,
1570 											       0x0,
1571 											       0xFFFFFFFF));
1572 								CHECK_STATUS(ddr3_tip_if_write(dev_num,
1573 											       ACCESS_TYPE_UNICAST,
1574 											       if_id,
1575 											       0x145C,
1576 											       0x0,
1577 											       0xFFFFFFFF));
1578 							}
1579 						}
1580 					}
1581 				}
1582 			}
1583 
1584 			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1585 				start_adll = ctrl_adll[if_id * cs * octets_per_if_num + pup] +
1586 					     ctrl_level_phase[if_id * cs * octets_per_if_num + pup];
1587 				CHECK_STATUS(ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id, pup_access, pup,
1588 								DDR_PHY_DATA, reg, start_adll));
1589 				if (direction == 0)
1590 					CHECK_STATUS(ddr3_tip_bus_write(dev_num,
1591 									ACCESS_TYPE_UNICAST,
1592 									if_id,
1593 									pup_access,
1594 									pup,
1595 									DDR_PHY_DATA,
1596 									CTX_PHY_REG(cs),
1597 									ctrl_adll1[if_id *
1598 										   cs *
1599 										   octets_per_if_num +
1600 										   pup]));
1601 			}
1602 		}
1603 
1604 		printf("Final,CS %d,%s,Leveling,Result,Adll,", cs, ((direction == 0) ? "TX" : "RX"));
1605 
1606 		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1607 			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1608 			if (mode == 1) {
1609 				for (pup = start_pup; pup <= end_pup; pup++) {
1610 					VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
1611 					printf("I/F%d-PHY%d , ", if_id, pup);
1612 				}
1613 			} else {
1614 				printf("I/F%d , ", if_id);
1615 			}
1616 		}
1617 		printf("\n");
1618 
1619 		for (adll = 0; adll < ADLL_LENGTH; adll++) {
1620 			adll_value = (direction == 0) ? ((adll * 2) - 32) : ((adll * 3) - 48);
1621 			printf("Final,%s,LevelingSweep,Result, %d ,", ((direction == 0) ? "TX" : "RX"), adll_value);
1622 
1623 			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1624 				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1625 				for (pup = start_pup; pup <= end_pup; pup++)
1626 					printf("%8d , ", ctrl_sweepres[adll][if_id][pup]);
1627 			}
1628 			printf("\n");
1629 		}
1630 
1631 		/* write back to the PHY the Rx DQS value we stored at the beginning */
1632 		write_leveling_value(dev_num, ctrl_adll, ctrl_level_phase, reg);
1633 		if (direction == 0)
1634 			ddr3_tip_write_adll_value(dev_num, ctrl_adll1, CTX_PHY_REG(cs));
1635 
1636 		/* print adll results */
1637 		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, MASK_ALL_BITS);
1638 		printf("%s,DQS,Leveling,,,", (direction == 0) ? "Tx" : "Rx");
1639 		print_adll(dev_num, ctrl_adll);
1640 		print_ph(dev_num, ctrl_level_phase);
1641 	}
1642 	ddr3_tip_reset_fifo_ptr(dev_num);
1643 
1644 	return 0;
1645 }
1646 #endif /* EXCLUDE_SWITCH_DEBUG */
1647 
1648 void print_topology(struct mv_ddr_topology_map *topology_db)
1649 {
1650 	u32 ui, uj;
1651 	u32 dev_num = 0;
1652 
1653 	printf("\tinterface_mask: 0x%x\n", topology_db->if_act_mask);
1654 	printf("\tNumber of buses: 0x%x\n",
1655 	       ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE));
1656 	printf("\tbus_act_mask: 0x%x\n", topology_db->bus_act_mask);
1657 
1658 	for (ui = 0; ui < MAX_INTERFACE_NUM; ui++) {
1659 		VALIDATE_IF_ACTIVE(topology_db->if_act_mask, ui);
1660 		printf("\n\tInterface ID: %d\n", ui);
1661 		printf("\t\tDDR Frequency: %s\n",
1662 		       convert_freq(topology_db->
1663 				    interface_params[ui].memory_freq));
1664 		printf("\t\tSpeed_bin: %d\n",
1665 		       topology_db->interface_params[ui].speed_bin_index);
1666 		printf("\t\tBus_width: %d\n",
1667 		       (4 << topology_db->interface_params[ui].bus_width));
1668 		printf("\t\tMem_size: %s\n",
1669 		       convert_mem_size(topology_db->
1670 					interface_params[ui].memory_size));
1671 		printf("\t\tCAS-WL: %d\n",
1672 		       topology_db->interface_params[ui].cas_wl);
1673 		printf("\t\tCAS-L: %d\n",
1674 		       topology_db->interface_params[ui].cas_l);
1675 		printf("\t\tTemperature: %d\n",
1676 		       topology_db->interface_params[ui].interface_temp);
1677 		printf("\n");
1678 		for (uj = 0; uj < 4; uj++) {
1679 			printf("\t\tBus %d parameters- CS Mask: 0x%x\t", uj,
1680 			       topology_db->interface_params[ui].
1681 			       as_bus_params[uj].cs_bitmask);
1682 			printf("Mirror: 0x%x\t",
1683 			       topology_db->interface_params[ui].
1684 			       as_bus_params[uj].mirror_enable_bitmask);
1685 			printf("DQS Swap is %s \t",
1686 			       (topology_db->
1687 				interface_params[ui].as_bus_params[uj].
1688 				is_dqs_swap == 1) ? "enabled" : "disabled");
1689 			printf("Ck Swap:%s\t",
1690 			       (topology_db->
1691 				interface_params[ui].as_bus_params[uj].
1692 				is_ck_swap == 1) ? "enabled" : "disabled");
1693 			printf("\n");
1694 		}
1695 	}
1696 }
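
/*
 * Usage sketch: print the active topology, e.g. from a debug command,
 * using the map already registered with the library.
 *
 *	print_topology(mv_ddr_topology_map_get());
 */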
1697 #endif /* DDR_VIEWER_TOOL */
1698 
1699 #if !defined(EXCLUDE_SWITCH_DEBUG)
1700 /*
1701  * Execute XSB Test transaction (rd/wr/both)
1702  */
1703 int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
1704 		 u32 read_type, u32 burst_length)
1705 {
1706 	u32 seq = 0, if_id = 0, addr, cnt;
1707 	int ret = MV_OK, ret_tmp;
1708 	u32 data_read[MAX_INTERFACE_NUM];
1709 	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1710 
1711 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1712 		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1713 		addr = mem_addr;
1714 		for (cnt = 0; cnt <= burst_length; cnt++) {
1715 			seq = (seq + 1) % 8;
1716 			if (write_type != 0) {
1717 				CHECK_STATUS(ddr3_tip_ext_write
1718 					     (dev_num, if_id, addr, 1,
1719 					      xsb_test_table[seq]));
1720 			}
1721 			if (read_type != 0) {
1722 				CHECK_STATUS(ddr3_tip_ext_read
1723 					     (dev_num, if_id, addr, 1,
1724 					      data_read));
1725 			}
1726 			if ((read_type != 0) && (write_type != 0)) {
1727 				ret_tmp =
1728 					ddr3_tip_compare(if_id,
1729 							 xsb_test_table[seq],
1730 							 data_read,
1731 							 0xff);
1732 				addr += (EXT_ACCESS_BURST_LENGTH * 4);
1733 				ret = (ret != MV_OK) ? ret : ret_tmp;
1734 			}
1735 		}
1736 	}
1737 
1738 	return ret;
1739 }
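
/*
 * Usage sketch, assuming device 0 and a scratch DRAM offset of 0x1000:
 * write the test patterns, read them back and compare all byte lanes;
 * a return other than MV_OK indicates at least one miscompare.
 *
 *	if (run_xsb_test(0, 0x1000, 1, 1, 1) != MV_OK)
 *		printf("XSB read/write compare failed\n");
 */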
1740 
1741 #else /* EXCLUDE_SWITCH_DEBUG */
1742 u32 start_xsb_offset = 0;
1743 
1744 int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
1745 		 u32 read_type, u32 burst_length)
1746 {
1747 	return MV_OK;
1748 }
1749 
1750 #endif /* EXCLUDE_SWITCH_DEBUG */
1751