1 // SPDX-License-Identifier: GPL-2.0
2 /*
3 * Copyright (C) Marvell International Ltd. and its affiliates
4 */
5
6 #include "ddr3_init.h"
7
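/* default PBS step size in ps, used when the measured PBS range is zero */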
8 #define TYPICAL_PBS_VALUE 12
9
10 u32 nominal_adll[MAX_INTERFACE_NUM * MAX_BUS_NUM];
11 enum hws_training_ip_stat train_status[MAX_INTERFACE_NUM];
12 u8 result_mat[MAX_INTERFACE_NUM][MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
13 u8 result_mat_rx_dqs[MAX_INTERFACE_NUM][MAX_BUS_NUM][MAX_CS_NUM];
14 /* 4-EEWA, 3-EWA, 2-SWA, 1-Fail, 0-Pass */
15 u8 result_all_bit[MAX_BUS_NUM * BUS_WIDTH_IN_BITS * MAX_INTERFACE_NUM];
16 u8 max_pbs_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
17 u8 min_pbs_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
18 u8 max_adll_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
19 u8 min_adll_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
20 u32 pbsdelay_per_pup[NUM_OF_PBS_MODES][MAX_INTERFACE_NUM][MAX_BUS_NUM][MAX_CS_NUM];
21 u8 adll_shift_lock[MAX_INTERFACE_NUM][MAX_BUS_NUM];
22 u8 adll_shift_val[MAX_INTERFACE_NUM][MAX_BUS_NUM];
23 enum hws_pattern pbs_pattern = PATTERN_VREF;
24 static u8 pup_state[MAX_INTERFACE_NUM][MAX_BUS_NUM];
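/* pup_state: 3 = EBA, 4 = EEBA, 2 = SBA, 1 = training failed (set in the stages below) */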
25
26 /*
27 * Name: ddr3_tip_pbs
28 * Desc: Per-bit skew (PBS) calibration for RX or TX.
29 * Args: dev_num - device number; pbs_mode - PBS_RX_MODE or PBS_TX_MODE
30 * Notes: ADLL shift search (EBA/EEBA/SBA stages), then per-bit skew search, normalization and DQ PBS register update.
31 * Returns: MV_OK if success, other error code if fail.
32 */
33 int ddr3_tip_pbs(u32 dev_num, enum pbs_dir pbs_mode)
34 {
35 u32 res0[MAX_INTERFACE_NUM];
36 int adll_tap = MEGA / freq_val[medium_freq] / 64;
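/* duration of one ADLL tap in ps: clock period [ps] / 64 */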
37 int pad_num = 0;
38 enum hws_search_dir search_dir =
39 (pbs_mode == PBS_RX_MODE) ? HWS_HIGH2LOW : HWS_LOW2HIGH;
40 enum hws_dir dir = (pbs_mode == PBS_RX_MODE) ? OPER_READ : OPER_WRITE;
41 int iterations = (pbs_mode == PBS_RX_MODE) ? 31 : 63;
42 u32 res_valid_mask = (pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
43 int init_val = (search_dir == HWS_LOW2HIGH) ? 0 : iterations;
44 enum hws_edge_compare search_edge = EDGE_FP;
45 u32 pup = 0, bit = 0, if_id = 0, all_lock = 0, cs_num = 0;
46 u32 reg_addr = 0;
47 u32 validation_val = 0;
48 u32 cs_enable_reg_val[MAX_INTERFACE_NUM];
49 u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
50 u8 temp = 0;
51 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
52 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
53
54 /* save the current cs enable reg val and enable single cs per interface */
55 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
56 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
57
58 /* save current cs enable reg val */
59 CHECK_STATUS(ddr3_tip_if_read
60 (dev_num, ACCESS_TYPE_UNICAST, if_id,
61 DUAL_DUNIT_CFG_REG, cs_enable_reg_val, MASK_ALL_BITS));
62
63 /* enable single cs */
64 CHECK_STATUS(ddr3_tip_if_write
65 (dev_num, ACCESS_TYPE_UNICAST, if_id,
66 DUAL_DUNIT_CFG_REG, (1 << 3), (1 << 3)));
67 }
68
69 reg_addr = (pbs_mode == PBS_RX_MODE) ?
70 CRX_PHY_REG(effective_cs) :
71 CTX_PHY_REG(effective_cs);
72 ddr3_tip_read_adll_value(dev_num, nominal_adll, reg_addr, MASK_ALL_BITS);
73
74 /* stage 1 shift ADLL */
75 ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
76 PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
77 PARAM_NOT_CARE, RESULT_PER_BIT,
78 HWS_CONTROL_ELEMENT_ADLL, search_dir, dir,
79 tm->if_act_mask, init_val, iterations,
80 pbs_pattern, search_edge, CS_SINGLE, cs_num,
81 train_status);
82 validation_val = (pbs_mode == PBS_RX_MODE) ? 0x1f : 0;
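/* a result equal to validation_val means the search hit the boundary of its range; such pups are moved to the EEBA stage below */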
83 for (pup = 0; pup < octets_per_if_num; pup++) {
84 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
85 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
86 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
87 min_adll_per_pup[if_id][pup] =
88 (pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
89 pup_state[if_id][pup] = 0x3;
90 adll_shift_lock[if_id][pup] = 1;
91 max_adll_per_pup[if_id][pup] = 0x0;
92 }
93 }
94
95 /* EBA: classify each pup according to the per-bit results of the ADLL shift search */
96 for (pup = 0; pup < octets_per_if_num; pup++) {
97 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
98 for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
99 CHECK_STATUS(ddr3_tip_if_read
100 (dev_num, ACCESS_TYPE_MULTICAST,
101 PARAM_NOT_CARE,
102 mask_results_dq_reg_map[
103 bit + pup * BUS_WIDTH_IN_BITS],
104 res0, MASK_ALL_BITS));
105 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
106 if_id++) {
107 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
108 DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
109 ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
110 if_id, bit, pup,
111 res0[if_id]));
112 if (pup_state[if_id][pup] != 3)
113 continue;
114 /* if not in EBA state, then move to the next pup */
115
116 if ((res0[if_id] & 0x2000000) == 0) {
117 DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
118 ("-- Fail Training IP\n"));
119 /* training machine failed */
120 pup_state[if_id][pup] = 1;
121 adll_shift_lock[if_id][pup] = 0;
122 continue;
123 }
124
125 else if ((res0[if_id] & res_valid_mask) ==
126 validation_val) {
127 DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
128 ("-- FAIL EBA %d %d %d %d\n",
129 if_id, bit, pup,
130 res0[if_id]));
131 pup_state[if_id][pup] = 4;
132 /* this pup move to EEBA */
133 adll_shift_lock[if_id][pup] = 0;
134 continue;
135 } else {
136 /*
137 * The search ended in Pass; we
138 * need a Fail.
139 */
140 res0[if_id] =
141 (pbs_mode == PBS_RX_MODE) ?
142 ((res0[if_id] &
143 res_valid_mask) + 1) :
144 ((res0[if_id] &
145 res_valid_mask) - 1);
146 max_adll_per_pup[if_id][pup] =
147 (max_adll_per_pup[if_id][pup] <
148 res0[if_id]) ?
149 (u8)res0[if_id] :
150 max_adll_per_pup[if_id][pup];
151 min_adll_per_pup[if_id][pup] =
152 (res0[if_id] >
153 min_adll_per_pup[if_id][pup]) ?
154 min_adll_per_pup[if_id][pup] :
155 (u8)
156 res0[if_id];
157 /*
158 * unlike Rx, we are searching for the
159 * smallest value of the DQ shift so
160 * that all buses would fail
161 */
162 adll_shift_val[if_id][pup] =
163 (pbs_mode == PBS_RX_MODE) ?
164 max_adll_per_pup[if_id][pup] :
165 min_adll_per_pup[if_id][pup];
166 }
167 }
168 }
169 }
170
171 /* EEBA: rerun the ADLL shift search for pups that hit the search boundary in the EBA stage */
172 for (pup = 0; pup < octets_per_if_num; pup++) {
173 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
174 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
175 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
176
177 if (pup_state[if_id][pup] != 4)
178 continue;
179 /*
180 * if the pup state is not EEBA, then
181 * move to the next pup
182 */
183 reg_addr = (pbs_mode == PBS_RX_MODE) ?
184 (0x54 + effective_cs * 0x10) :
185 (0x14 + effective_cs * 0x10);
186 CHECK_STATUS(ddr3_tip_bus_write
187 (dev_num, ACCESS_TYPE_UNICAST, if_id,
188 ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
189 reg_addr, 0x1f));
190 reg_addr = (pbs_mode == PBS_RX_MODE) ?
191 (0x55 + effective_cs * 0x10) :
192 (0x15 + effective_cs * 0x10);
193 CHECK_STATUS(ddr3_tip_bus_write
194 (dev_num, ACCESS_TYPE_UNICAST, if_id,
195 ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
196 reg_addr, 0x1f));
197 /* initialize the Edge2 Max. */
198 adll_shift_val[if_id][pup] = 0;
199 min_adll_per_pup[if_id][pup] =
200 (pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
201 max_adll_per_pup[if_id][pup] = 0x0;
202
203 ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
204 PARAM_NOT_CARE,
205 ACCESS_TYPE_MULTICAST,
206 PARAM_NOT_CARE, RESULT_PER_BIT,
207 HWS_CONTROL_ELEMENT_ADLL,
208 search_dir, dir,
209 tm->if_act_mask, init_val,
210 iterations, pbs_pattern,
211 search_edge, CS_SINGLE, cs_num,
212 train_status);
213 DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
214 ("ADLL shift results:\n"));
215
216 for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
217 CHECK_STATUS(ddr3_tip_if_read
218 (dev_num, ACCESS_TYPE_MULTICAST,
219 PARAM_NOT_CARE,
220 mask_results_dq_reg_map[
221 bit + pup *
222 BUS_WIDTH_IN_BITS],
223 res0, MASK_ALL_BITS));
224 DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
225 ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
226 if_id, bit, pup,
227 res0[if_id]));
228
229 if ((res0[if_id] & 0x2000000) == 0) {
230 DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
231 (" -- EEBA Fail\n"));
232 bit = BUS_WIDTH_IN_BITS;
233 /* exit bit loop */
234 DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
235 ("-- EEBA Fail Training IP\n"));
236 /*
237 * the training machine failed, but it passed
238 * before in the EBA stage, so maybe the DQS
239 * shift changed the environment
240 */
241 pup_state[if_id][pup] = 2;
242 adll_shift_lock[if_id][pup] = 0;
243 reg_addr = (pbs_mode == PBS_RX_MODE) ?
244 (0x54 + effective_cs * 0x10) :
245 (0x14 + effective_cs * 0x10);
246 CHECK_STATUS(ddr3_tip_bus_write
247 (dev_num,
248 ACCESS_TYPE_UNICAST,
249 if_id,
250 ACCESS_TYPE_UNICAST, pup,
251 DDR_PHY_DATA, reg_addr,
252 0x0));
253 reg_addr = (pbs_mode == PBS_RX_MODE) ?
254 (0x55 + effective_cs * 0x10) :
255 (0x15 + effective_cs * 0x10);
256 CHECK_STATUS(ddr3_tip_bus_write
257 (dev_num,
258 ACCESS_TYPE_UNICAST,
259 if_id,
260 ACCESS_TYPE_UNICAST, pup,
261 DDR_PHY_DATA, reg_addr,
262 0x0));
263 continue;
264 } else if ((res0[if_id] & res_valid_mask) ==
265 validation_val) {
266 /* exit bit loop */
267 bit = BUS_WIDTH_IN_BITS;
268 DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
269 ("-- FAIL EEBA\n"));
270 /* this pup move to SBA */
271 pup_state[if_id][pup] = 2;
272 adll_shift_lock[if_id][pup] = 0;
273 reg_addr = (pbs_mode == PBS_RX_MODE) ?
274 (0x54 + effective_cs * 0x10) :
275 (0x14 + effective_cs * 0x10);
276 CHECK_STATUS(ddr3_tip_bus_write
277 (dev_num,
278 ACCESS_TYPE_UNICAST,
279 if_id,
280 ACCESS_TYPE_UNICAST, pup,
281 DDR_PHY_DATA, reg_addr,
282 0x0));
283 reg_addr = (pbs_mode == PBS_RX_MODE) ?
284 (0x55 + effective_cs * 0x10) :
285 (0x15 + effective_cs * 0x10);
286 CHECK_STATUS(ddr3_tip_bus_write
287 (dev_num,
288 ACCESS_TYPE_UNICAST,
289 if_id,
290 ACCESS_TYPE_UNICAST, pup,
291 DDR_PHY_DATA, reg_addr,
292 0x0));
293 continue;
294 } else {
295 adll_shift_lock[if_id][pup] = 1;
296 /*
297 * The search ended in Pass; we
298 * need a Fail.
299 */
300 res0[if_id] =
301 (pbs_mode == PBS_RX_MODE) ?
302 ((res0[if_id] &
303 res_valid_mask) + 1) :
304 ((res0[if_id] &
305 res_valid_mask) - 1);
306 max_adll_per_pup[if_id][pup] =
307 (max_adll_per_pup[if_id][pup] <
308 res0[if_id]) ?
309 (u8)res0[if_id] :
310 max_adll_per_pup[if_id][pup];
311 min_adll_per_pup[if_id][pup] =
312 (res0[if_id] >
313 min_adll_per_pup[if_id][pup]) ?
314 min_adll_per_pup[if_id][pup] :
315 (u8)res0[if_id];
316 /*
317 * unlike Rx, we are searching for the
318 * smallest value of the DQ shift so that
319 * all buses would fail
320 */
321 adll_shift_val[if_id][pup] =
322 (pbs_mode == PBS_RX_MODE) ?
323 max_adll_per_pup[if_id][pup] :
324 min_adll_per_pup[if_id][pup];
325 }
326 }
327 }
328 }
329
330 /* Print Stage result */
331 for (pup = 0; pup < octets_per_if_num; pup++) {
332 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
333 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
334 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
335 DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
336 ("FP I/F %d, ADLL Shift for EBA: pup[%d] Lock status = %d Lock Val = %d,%d\n",
337 if_id, pup,
338 adll_shift_lock[if_id][pup],
339 max_adll_per_pup[if_id][pup],
340 min_adll_per_pup[if_id][pup]));
341 }
342 }
343 DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
344 ("Update ADLL Shift of all pups:\n"));
345
346 for (pup = 0; pup < octets_per_if_num; pup++) {
347 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
348 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
349 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
350 if (adll_shift_lock[if_id][pup] != 1)
351 continue;
352 /* if pup not locked continue to next pup */
353
354 reg_addr = (pbs_mode == PBS_RX_MODE) ?
355 (0x3 + effective_cs * 4) :
356 (0x1 + effective_cs * 4);
357 CHECK_STATUS(ddr3_tip_bus_write
358 (dev_num, ACCESS_TYPE_UNICAST, if_id,
359 ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
360 reg_addr, adll_shift_val[if_id][pup]));
361 DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
362 ("FP I/F %d, Pup[%d] = %d\n", if_id,
363 pup, adll_shift_val[if_id][pup]));
364 }
365 }
366
367 /* PBS EEBA&EBA */
368 /* Start the Per Bit Skew search */
369 for (pup = 0; pup < octets_per_if_num; pup++) {
370 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
371 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
372 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
373 max_pbs_per_pup[if_id][pup] = 0x0;
374 min_pbs_per_pup[if_id][pup] = 0x1f;
375 for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
376 /* reset result for PBS */
377 result_all_bit[bit + pup * BUS_WIDTH_IN_BITS +
378 if_id * MAX_BUS_NUM *
379 BUS_WIDTH_IN_BITS] = 0;
380 }
381 }
382 }
383
384 iterations = 31;
385 search_dir = HWS_LOW2HIGH;
386 /* note: equivalently, init_val = (search_dir == HWS_LOW2HIGH) ? 0 : iterations */
387 init_val = 0;
388
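/* run the per-bit skew (DQ skew) search; results are collected only for pups whose ADLL shift locked */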
389 ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
390 ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
391 RESULT_PER_BIT, HWS_CONTROL_ELEMENT_DQ_SKEW,
392 search_dir, dir, tm->if_act_mask, init_val,
393 iterations, pbs_pattern, search_edge,
394 CS_SINGLE, cs_num, train_status);
395
396 for (pup = 0; pup < octets_per_if_num; pup++) {
397 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
398 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
399 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
400 if (adll_shift_lock[if_id][pup] != 1) {
401 /* if the pup is not locked, continue to the next pup */
402 continue;
403 }
404
405 for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
406 CHECK_STATUS(ddr3_tip_if_read
407 (dev_num, ACCESS_TYPE_MULTICAST,
408 PARAM_NOT_CARE,
409 mask_results_dq_reg_map[
410 bit +
411 pup * BUS_WIDTH_IN_BITS],
412 res0, MASK_ALL_BITS));
413 DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
414 ("Per Bit Skew search, FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
415 if_id, bit, pup,
416 res0[if_id]));
417 if ((res0[if_id] & 0x2000000) == 0) {
418 DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
419 ("--EBA PBS Fail - Training IP machine\n"));
420 /* exit the bit loop */
421 bit = BUS_WIDTH_IN_BITS;
422 /*
423 * the ADLL is no longer locked; a new
424 * search is needed
425 */
426 adll_shift_lock[if_id][pup] = 0;
427 /* Move to SBA */
428 pup_state[if_id][pup] = 2;
429 max_pbs_per_pup[if_id][pup] = 0x0;
430 min_pbs_per_pup[if_id][pup] = 0x1f;
431 continue;
432 } else {
433 temp = (u8)(res0[if_id] &
434 res_valid_mask);
435 max_pbs_per_pup[if_id][pup] =
436 (temp >
437 max_pbs_per_pup[if_id][pup]) ?
438 temp :
439 max_pbs_per_pup[if_id][pup];
440 min_pbs_per_pup[if_id][pup] =
441 (temp <
442 min_pbs_per_pup[if_id][pup]) ?
443 temp :
444 min_pbs_per_pup[if_id][pup];
445 result_all_bit[bit +
446 pup * BUS_WIDTH_IN_BITS +
447 if_id * MAX_BUS_NUM *
448 BUS_WIDTH_IN_BITS] =
449 temp;
450 }
451 }
452 }
453 }
454
455 /* Check all Pup lock */
456 all_lock = 1;
457 for (pup = 0; pup < octets_per_if_num; pup++) {
458 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
459 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
460 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
461 all_lock = all_lock * adll_shift_lock[if_id][pup];
462 }
463 }
464
465 /* Only if not all Pups Lock */
466 if (all_lock == 0) {
467 DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
468 ("##########ADLL shift for SBA###########\n"));
469
470 /* ADLL shift for SBA: rerun the ADLL shift search individually for each pup that did not lock */
471 search_dir = (pbs_mode == PBS_RX_MODE) ? HWS_LOW2HIGH :
472 HWS_HIGH2LOW;
473 init_val = (search_dir == HWS_LOW2HIGH) ? 0 : iterations;
474 for (pup = 0; pup < octets_per_if_num; pup++) {
475 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
476 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
477 if_id++) {
478 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
479 if (adll_shift_lock[if_id][pup] == 1) {
480 /* if the pup is locked, continue to the next pup */
481 continue;
482 }
483 /* initialize the variables (although initialized before) */
484 adll_shift_lock[if_id][pup] = 0;
485 reg_addr = (pbs_mode == PBS_RX_MODE) ?
486 (0x54 + effective_cs * 0x10) :
487 (0x14 + effective_cs * 0x10);
488 CHECK_STATUS(ddr3_tip_bus_write
489 (dev_num, ACCESS_TYPE_UNICAST,
490 if_id, ACCESS_TYPE_UNICAST, pup,
491 DDR_PHY_DATA, reg_addr, 0));
492 reg_addr = (pbs_mode == PBS_RX_MODE) ?
493 (0x55 + effective_cs * 0x10) :
494 (0x15 + effective_cs * 0x10);
495 CHECK_STATUS(ddr3_tip_bus_write
496 (dev_num, ACCESS_TYPE_UNICAST,
497 if_id, ACCESS_TYPE_UNICAST, pup,
498 DDR_PHY_DATA, reg_addr, 0));
499 reg_addr = (pbs_mode == PBS_RX_MODE) ?
500 (0x5f + effective_cs * 0x10) :
501 (0x1f + effective_cs * 0x10);
502 CHECK_STATUS(ddr3_tip_bus_write
503 (dev_num, ACCESS_TYPE_UNICAST,
504 if_id, ACCESS_TYPE_UNICAST, pup,
505 DDR_PHY_DATA, reg_addr, 0));
506 /* initialize the Edge2 Max. */
507 adll_shift_val[if_id][pup] = 0;
508 min_adll_per_pup[if_id][pup] = 0x1f;
509 max_adll_per_pup[if_id][pup] = 0x0;
510
511 ddr3_tip_ip_training(dev_num,
512 ACCESS_TYPE_MULTICAST,
513 PARAM_NOT_CARE,
514 ACCESS_TYPE_MULTICAST,
515 PARAM_NOT_CARE,
516 RESULT_PER_BIT,
517 HWS_CONTROL_ELEMENT_ADLL,
518 search_dir, dir,
519 tm->if_act_mask,
520 init_val, iterations,
521 pbs_pattern,
522 search_edge, CS_SINGLE,
523 cs_num, train_status);
524
525 for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
526 CHECK_STATUS(ddr3_tip_if_read
527 (dev_num,
528 ACCESS_TYPE_MULTICAST,
529 PARAM_NOT_CARE,
530 mask_results_dq_reg_map
531 [bit +
532 pup *
533 BUS_WIDTH_IN_BITS],
534 res0, MASK_ALL_BITS));
535 DEBUG_PBS_ENGINE(
536 DEBUG_LEVEL_INFO,
537 ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
538 if_id, bit, pup, res0[if_id]));
539 if ((res0[if_id] & 0x2000000) == 0) {
540 /* exit the bit loop */
541 bit = BUS_WIDTH_IN_BITS;
542 /* Fail SBA --> Fail PBS */
543 pup_state[if_id][pup] = 1;
544 DEBUG_PBS_ENGINE
545 (DEBUG_LEVEL_INFO,
546 (" SBA Fail\n"));
547 continue;
548 } else {
549 /*
550 * increment; the pup locks only when
551 * all 8 bits lock
552 */
553 adll_shift_lock[if_id][pup]++;
554 /*
555 * The search ended in Pass;
556 * we need a Fail.
557 */
558 res0[if_id] =
559 (pbs_mode == PBS_RX_MODE) ?
560 ((res0[if_id] & res_valid_mask) + 1) :
561 ((res0[if_id] & res_valid_mask) - 1);
562 max_adll_per_pup[if_id][pup] =
563 (max_adll_per_pup[if_id]
564 [pup] < res0[if_id]) ?
565 (u8)res0[if_id] :
566 max_adll_per_pup[if_id][pup];
567 min_adll_per_pup[if_id][pup] =
568 (res0[if_id] >
569 min_adll_per_pup[if_id]
570 [pup]) ?
571 min_adll_per_pup[if_id][pup] :
572 (u8)res0[if_id];
573 /*
574 * unlike Rx, we are searching for
575 * the smallest value of the DQ shift
576 * so that all buses would fail
577 */
578 adll_shift_val[if_id][pup] =
579 (pbs_mode == PBS_RX_MODE) ?
580 max_adll_per_pup[if_id][pup] :
581 min_adll_per_pup[if_id][pup];
582 }
583 }
584 /* the pup is locked (1) only if all 8 bits locked */
585 adll_shift_lock[if_id][pup] =
586 (adll_shift_lock[if_id][pup] == 8) ?
587 1 : 0;
588 reg_addr = (pbs_mode == PBS_RX_MODE) ?
589 (0x3 + effective_cs * 4) :
590 (0x1 + effective_cs * 4);
591 CHECK_STATUS(ddr3_tip_bus_write
592 (dev_num, ACCESS_TYPE_UNICAST,
593 if_id, ACCESS_TYPE_UNICAST, pup,
594 DDR_PHY_DATA, reg_addr,
595 adll_shift_val[if_id][pup]));
596 DEBUG_PBS_ENGINE(
597 DEBUG_LEVEL_INFO,
598 ("adll_shift_lock[%x][%x] = %x\n",
599 if_id, pup,
600 adll_shift_lock[if_id][pup]));
601 }
602 }
603
604 /* End ADLL Shift for SBA */
605 /* Start the Per Bit Skew search */
606 /* The ADLL shift finished with a Pass */
607 search_edge = (pbs_mode == PBS_RX_MODE) ? EDGE_PF : EDGE_FP;
608 search_dir = (pbs_mode == PBS_RX_MODE) ?
609 HWS_LOW2HIGH : HWS_HIGH2LOW;
610 iterations = 0x1f;
611 /* the initial value differs between Rx and Tx mode */
612 init_val = (pbs_mode == PBS_RX_MODE) ? 0 : iterations;
613
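/* rerun the per-bit skew search for the SBA pups, using the pass-fail (Rx) or fail-pass (Tx) edge */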
614 ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
615 PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
616 PARAM_NOT_CARE, RESULT_PER_BIT,
617 HWS_CONTROL_ELEMENT_DQ_SKEW,
618 search_dir, dir, tm->if_act_mask,
619 init_val, iterations, pbs_pattern,
620 search_edge, CS_SINGLE, cs_num,
621 train_status);
622
623 for (pup = 0; pup < octets_per_if_num; pup++) {
624 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
625 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
626 if_id++) {
627 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
628 for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
629 CHECK_STATUS(ddr3_tip_if_read
630 (dev_num,
631 ACCESS_TYPE_MULTICAST,
632 PARAM_NOT_CARE,
633 mask_results_dq_reg_map
634 [bit +
635 pup *
636 BUS_WIDTH_IN_BITS],
637 res0, MASK_ALL_BITS));
638 if (pup_state[if_id][pup] != 2) {
639 /*
640 * if pup is not SBA continue
641 * to next pup
642 */
643 bit = BUS_WIDTH_IN_BITS;
644 continue;
645 }
646 DEBUG_PBS_ENGINE(
647 DEBUG_LEVEL_INFO,
648 ("Per Bit Skew search, PF I/F %d, bit:%d, pup:%d res0 0x%x\n",
649 if_id, bit, pup, res0[if_id]));
650 if ((res0[if_id] & 0x2000000) == 0) {
651 DEBUG_PBS_ENGINE
652 (DEBUG_LEVEL_INFO,
653 ("SBA Fail\n"));
654
655 max_pbs_per_pup[if_id][pup] =
656 0x1f;
657 result_all_bit[
658 bit + pup *
659 BUS_WIDTH_IN_BITS +
660 if_id * MAX_BUS_NUM *
661 BUS_WIDTH_IN_BITS] =
662 0x1f;
663 } else {
664 temp = (u8)(res0[if_id] &
665 res_valid_mask);
666 max_pbs_per_pup[if_id][pup] =
667 (temp >
668 max_pbs_per_pup[if_id]
669 [pup]) ? temp :
670 max_pbs_per_pup
671 [if_id][pup];
672 min_pbs_per_pup[if_id][pup] =
673 (temp <
674 min_pbs_per_pup[if_id]
675 [pup]) ? temp :
676 min_pbs_per_pup
677 [if_id][pup];
678 result_all_bit[
679 bit + pup *
680 BUS_WIDTH_IN_BITS +
681 if_id * MAX_BUS_NUM *
682 BUS_WIDTH_IN_BITS] =
683 temp;
684 adll_shift_lock[if_id][pup] = 1;
685 }
686 }
687 }
688 }
689
690 /* Check all Pup state */
691 all_lock = 1;
692 for (pup = 0; pup < octets_per_if_num; pup++) {
693 /*
694 * DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
695 * ("pup_state[%d][%d] = %d\n",if_id,pup,pup_state
696 * [if_id][pup]));
697 */
698 }
699 }
700
701 /* END OF SBA */
702 /* Normalize: subtract each pup's minimum PBS value from every bit's result */
703 for (pup = 0; pup < octets_per_if_num; pup++) {
704 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
705 for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
706 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
707 if_id++) {
708 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
709 /* if pup not lock continue to next pup */
710 if (adll_shift_lock[if_id][pup] != 1) {
711 DEBUG_PBS_ENGINE(
712 DEBUG_LEVEL_ERROR,
713 ("PBS failed for IF #%d\n",
714 if_id));
715 training_result[training_stage][if_id]
716 = TEST_FAILED;
717
718 result_mat[if_id][pup][bit] = 0;
719 max_pbs_per_pup[if_id][pup] = 0;
720 min_pbs_per_pup[if_id][pup] = 0;
721 } else {
722 training_result[
723 training_stage][if_id] =
724 (training_result[training_stage]
725 [if_id] == TEST_FAILED) ?
726 TEST_FAILED : TEST_SUCCESS;
727 result_mat[if_id][pup][bit] =
728 result_all_bit[
729 bit + pup *
730 BUS_WIDTH_IN_BITS +
731 if_id * MAX_BUS_NUM *
732 BUS_WIDTH_IN_BITS] -
733 min_pbs_per_pup[if_id][pup];
734 }
735 DEBUG_PBS_ENGINE(
736 DEBUG_LEVEL_INFO,
737 ("The abs min_pbs[%d][%d] = %d\n",
738 if_id, pup,
739 min_pbs_per_pup[if_id][pup]));
740 }
741 }
742 }
743
744 /* Clean all results (zero the per-bit PBS registers) before writing the final values */
745 ddr3_tip_clean_pbs_result(dev_num, pbs_mode);
746
747 /* DQ PBS register update with the final result */
748 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
749 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
750 for (pup = 0; pup < octets_per_if_num; pup++) {
751 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
752
753 DEBUG_PBS_ENGINE(
754 DEBUG_LEVEL_INFO,
755 ("Final Results: if_id %d, pup %d, Pup State: %d\n",
756 if_id, pup, pup_state[if_id][pup]));
757 for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
758 if (dq_map_table == NULL) {
759 DEBUG_PBS_ENGINE(
760 DEBUG_LEVEL_ERROR,
761 ("dq_map_table not initialized\n"));
762 return MV_FAIL;
763 }
764 pad_num = dq_map_table[
765 bit + pup * BUS_WIDTH_IN_BITS +
766 if_id * BUS_WIDTH_IN_BITS *
767 MAX_BUS_NUM];
768 DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
769 ("result_mat: %d ",
770 result_mat[if_id][pup]
771 [bit]));
772 reg_addr = (pbs_mode == PBS_RX_MODE) ?
773 PBS_RX_PHY_REG(effective_cs, 0) :
774 PBS_TX_PHY_REG(effective_cs, 0);
775 CHECK_STATUS(ddr3_tip_bus_write
776 (dev_num, ACCESS_TYPE_UNICAST,
777 if_id, ACCESS_TYPE_UNICAST, pup,
778 DDR_PHY_DATA, reg_addr + pad_num,
779 result_mat[if_id][pup][bit]));
780 }
781
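/* estimate the PBS step size in ps: ADLL range in ps divided by the PBS step range; fall back to TYPICAL_PBS_VALUE when the range is zero */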
782 if (max_pbs_per_pup[if_id][pup] == min_pbs_per_pup[if_id][pup]) {
783 temp = TYPICAL_PBS_VALUE;
784 } else {
785 temp = ((max_adll_per_pup[if_id][pup] -
786 min_adll_per_pup[if_id][pup]) *
787 adll_tap /
788 (max_pbs_per_pup[if_id][pup] -
789 min_pbs_per_pup[if_id][pup]));
790 }
791 pbsdelay_per_pup[pbs_mode]
792 [if_id][pup][effective_cs] = temp;
793
794 /* in TX mode, also write the RX results stored during the RX stage */
795 if (pbs_mode == PBS_TX_MODE) {
796 /* Write TX results */
797 reg_addr = (0x14 + effective_cs * 0x10);
798 CHECK_STATUS(ddr3_tip_bus_write
799 (dev_num, ACCESS_TYPE_UNICAST,
800 if_id, ACCESS_TYPE_UNICAST, pup,
801 DDR_PHY_DATA, reg_addr,
802 (max_pbs_per_pup[if_id][pup] -
803 min_pbs_per_pup[if_id][pup]) /
804 2));
805 reg_addr = (0x15 + effective_cs * 0x10);
806 CHECK_STATUS(ddr3_tip_bus_write
807 (dev_num, ACCESS_TYPE_UNICAST,
808 if_id, ACCESS_TYPE_UNICAST, pup,
809 DDR_PHY_DATA, reg_addr,
810 (max_pbs_per_pup[if_id][pup] -
811 min_pbs_per_pup[if_id][pup]) /
812 2));
813
814 /* Write previously stored RX results */
815 reg_addr = (0x54 + effective_cs * 0x10);
816 CHECK_STATUS(ddr3_tip_bus_write
817 (dev_num, ACCESS_TYPE_UNICAST,
818 if_id, ACCESS_TYPE_UNICAST, pup,
819 DDR_PHY_DATA, reg_addr,
820 result_mat_rx_dqs[if_id][pup]
821 [effective_cs]));
822 reg_addr = (0x55 + effective_cs * 0x10);
823 CHECK_STATUS(ddr3_tip_bus_write
824 (dev_num, ACCESS_TYPE_UNICAST,
825 if_id, ACCESS_TYPE_UNICAST, pup,
826 DDR_PHY_DATA, reg_addr,
827 result_mat_rx_dqs[if_id][pup]
828 [effective_cs]));
829 } else {
830 /*
831 * RX results may affect RL results correctness,
832 * so just store the results; they will be written
833 * in the TX stage
834 */
835 result_mat_rx_dqs[if_id][pup][effective_cs] =
836 (max_pbs_per_pup[if_id][pup] -
837 min_pbs_per_pup[if_id][pup]) / 2;
838 }
839 DEBUG_PBS_ENGINE(
840 DEBUG_LEVEL_INFO,
841 (", PBS tap=%d [psec] ==> skew observed = %d\n",
842 temp,
843 ((max_pbs_per_pup[if_id][pup] -
844 min_pbs_per_pup[if_id][pup]) *
845 temp)));
846 }
847 }
848
849 /* Write back to the phy the default values */
850 reg_addr = (pbs_mode == PBS_RX_MODE) ?
851 CRX_PHY_REG(effective_cs) :
852 CTX_PHY_REG(effective_cs);
853 ddr3_tip_write_adll_value(dev_num, nominal_adll, reg_addr);
854
855 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
856 reg_addr = (pbs_mode == PBS_RX_MODE) ?
857 (0x5a + effective_cs * 0x10) :
858 (0x1a + effective_cs * 0x10);
859 CHECK_STATUS(ddr3_tip_bus_write
860 (dev_num, ACCESS_TYPE_UNICAST, if_id,
861 ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA, reg_addr,
862 0));
863
864 /* restore cs enable value */
865 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
866 CHECK_STATUS(ddr3_tip_if_write
867 (dev_num, ACCESS_TYPE_UNICAST, if_id,
868 DUAL_DUNIT_CFG_REG, cs_enable_reg_val[if_id],
869 MASK_ALL_BITS));
870 }
871
872 /* exit test mode */
873 CHECK_STATUS(ddr3_tip_if_write
874 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
875 ODPG_WR_RD_MODE_ENA_REG, 0xffff, MASK_ALL_BITS));
876
877 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
878 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
879 for (pup = 0; pup < octets_per_if_num; pup++) {
880 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
881 /*
882 * no valid window found
883 * (no lock at EBA ADLL shift at EBS)
884 */
885 if (pup_state[if_id][pup] == 1)
886 return MV_FAIL;
887 }
888 }
889
890 return MV_OK;
891 }
892
893 /*
894 * Name: ddr3_tip_pbs_rx.
895 * Desc: PBS RX
896 * Args: uidev_num - device number
897 * Notes:
898 * Returns: OK if success, other error code if fail.
899 */
900 int ddr3_tip_pbs_rx(u32 uidev_num)
901 {
902 return ddr3_tip_pbs(uidev_num, PBS_RX_MODE);
903 }
904
905 /*
906 * Name: ddr3_tip_pbs_tx.
907 * Desc: PBS TX
908 * Args: uidev_num - device number
909 * Notes:
910 * Returns: OK if success, other error code if fail.
911 */
912 int ddr3_tip_pbs_tx(u32 uidev_num)
913 {
914 return ddr3_tip_pbs(uidev_num, PBS_TX_MODE);
915 }
916
917 #ifdef DDR_VIEWER_TOOL
918 /*
919 * Print PBS results for all chip selects
920 */
921 int ddr3_tip_print_all_pbs_result(u32 dev_num)
922 {
923 u32 curr_cs;
924 u32 max_cs = ddr3_tip_max_cs_get(dev_num);
925
926 for (curr_cs = 0; curr_cs < max_cs; curr_cs++) {
927 ddr3_tip_print_pbs_result(dev_num, curr_cs, PBS_RX_MODE);
928 ddr3_tip_print_pbs_result(dev_num, curr_cs, PBS_TX_MODE);
929 }
930
931 return MV_OK;
932 }
933
934 /*
935 * Print PBS Result
936 */
937 int ddr3_tip_print_pbs_result(u32 dev_num, u32 cs_num, enum pbs_dir pbs_mode)
938 {
939 u32 data_value = 0, bit = 0, if_id = 0, pup = 0;
940 u32 reg_addr = (pbs_mode == PBS_RX_MODE) ?
941 PBS_RX_PHY_REG(cs_num, 0) :
942 PBS_TX_PHY_REG(cs_num, 0);
943 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
944 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
945
946 printf("%s,CS%d,PBS,ADLLRATIO,,,",
947 (pbs_mode == PBS_RX_MODE) ? "Rx" : "Tx", cs_num);
948
949 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
950 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
951 for (pup = 0; pup < octets_per_if_num; pup++) {
952 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
953 printf("%d,",
954 pbsdelay_per_pup[pbs_mode][if_id][pup][cs_num]);
955 }
956 }
957 printf("CS%d, %s ,PBS\n", cs_num,
958 (pbs_mode == PBS_RX_MODE) ? "Rx" : "Tx");
959
960 for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
961 printf("%s, DQ", (pbs_mode == PBS_RX_MODE) ? "Rx" : "Tx");
962 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
963 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
964 printf("%d ,PBS,,, ", bit);
965 for (pup = 0; pup <= octets_per_if_num;
966 pup++) {
967 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
968 CHECK_STATUS(ddr3_tip_bus_read
969 (dev_num, if_id,
970 ACCESS_TYPE_UNICAST, pup,
971 DDR_PHY_DATA, reg_addr + bit,
972 &data_value));
973 printf("%d , ", data_value);
974 }
975 }
976 printf("\n");
977 }
978 printf("\n");
979
980 return MV_OK;
981 }
982 #endif /* DDR_VIEWER_TOOL */
983
984 /*
985 * Clean PBS results (zero the per-bit PBS registers)
986 */
987 int ddr3_tip_clean_pbs_result(u32 dev_num, enum pbs_dir pbs_mode)
988 {
989 u32 if_id, pup, bit;
990 u32 reg_addr = (pbs_mode == PBS_RX_MODE) ?
991 PBS_RX_PHY_REG(effective_cs, 0) :
992 PBS_TX_PHY_REG(effective_cs, 0);
993 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
994 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
995
996 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
997 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
998 for (pup = 0; pup <= octets_per_if_num; pup++) {
999 for (bit = 0; bit <= BUS_WIDTH_IN_BITS + 3; bit++) {
1000 CHECK_STATUS(ddr3_tip_bus_write
1001 (dev_num, ACCESS_TYPE_UNICAST,
1002 if_id, ACCESS_TYPE_UNICAST, pup,
1003 DDR_PHY_DATA, reg_addr + bit, 0));
1004 }
1005 }
1006 }
1007
1008 return MV_OK;
1009 }
1010