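/*
 * Helper functions for querying and configuring the QLM/DLM SerDes
 * blocks (PCIe, SGMII, XAUI, RXAUI, XFI, ILK, SRIO, SATA, ...) on
 * Octeon II and Octeon III SoCs.
 */
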
#include <log.h>
#include <time.h>
#include <asm/global_data.h>
#include <linux/delay.h>

#include <mach/cvmx-regs.h>
#include <mach/octeon-model.h>
#include <mach/cvmx-fuse.h>
#include <mach/octeon-feature.h>
#include <mach/cvmx-qlm.h>
#include <mach/octeon_qlm.h>
#include <mach/cvmx-pcie.h>
#include <mach/cvmx-helper.h>
#include <mach/cvmx-helper-util.h>
#include <mach/cvmx-helper-jtag.h>
#include <mach/cvmx-bgxx-defs.h>
#include <mach/cvmx-ciu-defs.h>
#include <mach/cvmx-gmxx-defs.h>
#include <mach/cvmx-gserx-defs.h>
#include <mach/cvmx-mio-defs.h>
#include <mach/cvmx-pciercx-defs.h>
#include <mach/cvmx-pemx-defs.h>
#include <mach/cvmx-pexp-defs.h>
#include <mach/cvmx-rst-defs.h>
#include <mach/cvmx-sata-defs.h>
#include <mach/cvmx-sli-defs.h>
#include <mach/cvmx-sriomaintx-defs.h>
#include <mach/cvmx-sriox-defs.h>

DECLARE_GLOBAL_DATA_PTR;

/* GSERX_LANE_MODE[LMODE] lane-mode encodings */
#define R_25G_REFCLK100 0x0
#define R_5G_REFCLK100 0x1
#define R_8G_REFCLK100 0x2
#define R_125G_REFCLK15625_KX 0x3
#define R_3125G_REFCLK15625_XAUI 0x4
#define R_103125G_REFCLK15625_KR 0x5
#define R_125G_REFCLK15625_SGMII 0x6
#define R_5G_REFCLK15625_QSGMII 0x7
#define R_625G_REFCLK15625_RXAUI 0x8
#define R_25G_REFCLK125 0x9
#define R_5G_REFCLK125 0xa
#define R_8G_REFCLK125 0xb

static const int REF_100MHZ = 100000000;
static const int REF_125MHZ = 125000000;
static const int REF_156MHZ = 156250000;

static qlm_jtag_uint32_t *__cvmx_qlm_jtag_xor_ref;
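/**
 * Return the number of QLMs supported by the chip
 *
 * Return: number of QLMs
 */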
int cvmx_qlm_get_num(void)
{
	if (OCTEON_IS_MODEL(OCTEON_CN68XX))
		return 5;
	else if (OCTEON_IS_MODEL(OCTEON_CN66XX))
		return 3;
	else if (OCTEON_IS_MODEL(OCTEON_CN63XX))
		return 3;
	else if (OCTEON_IS_MODEL(OCTEON_CN61XX))
		return 3;
	else if (OCTEON_IS_MODEL(OCTEON_CNF71XX))
		return 2;
	else if (OCTEON_IS_MODEL(OCTEON_CN78XX))
		return 8;
	else if (OCTEON_IS_MODEL(OCTEON_CN73XX))
		return 7;
	else if (OCTEON_IS_MODEL(OCTEON_CNF75XX))
		return 9;
	return 0;
}

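/**
 * Return the QLM number for an interface
 *
 * @param xiface  interface to look up
 *
 * Return: the QLM number, or -1 on error
 */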
int cvmx_qlm_interface(int xiface)
{
	struct cvmx_xiface xi = cvmx_helper_xiface_to_node_interface(xiface);

	if (OCTEON_IS_MODEL(OCTEON_CN61XX)) {
		return (xi.interface == 0) ? 2 : 0;
	} else if (OCTEON_IS_MODEL(OCTEON_CN63XX) || OCTEON_IS_MODEL(OCTEON_CN66XX)) {
		return 2 - xi.interface;
	} else if (OCTEON_IS_MODEL(OCTEON_CNF71XX)) {
		if (xi.interface == 0)
			return 0;

		debug("Warning: %s: Invalid interface %d\n",
		      __func__, xi.interface);
	} else if (octeon_has_feature(OCTEON_FEATURE_BGX)) {
		debug("Warning: %s: not supported on BGX-capable models\n",
		      __func__);
		return -1;
	}

	/* Must be cn68xx */
	switch (xi.interface) {
	case 1:
		return 0;
	default:
		return xi.interface;
	}
}

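/**
 * Return the QLM/DLM driving the given interface and lmac index
 *
 * @param xiface  interface to look up
 * @param index   lmac index within the interface
 *
 * Return: QLM/DLM number, or -1 if the SerDes is powered down, in reset
 *	   or not configured for this interface
 */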
int cvmx_qlm_lmac(int xiface, int index)
{
	struct cvmx_xiface xi = cvmx_helper_xiface_to_node_interface(xiface);

	if (OCTEON_IS_MODEL(OCTEON_CN78XX)) {
		cvmx_bgxx_cmr_global_config_t gconfig;
		cvmx_gserx_phy_ctl_t phy_ctl;
		cvmx_gserx_cfg_t gserx_cfg;
		int qlm;

		if (xi.interface < 6) {
			if (xi.interface < 2) {
				gconfig.u64 =
					csr_rd_node(xi.node,
						    CVMX_BGXX_CMR_GLOBAL_CONFIG(xi.interface));
				if (gconfig.s.pmux_sds_sel)
					qlm = xi.interface + 2;
				else
					qlm = xi.interface;
			} else {
				qlm = xi.interface + 2;
			}

			/* Make sure the QLM is powered up and out of reset */
			phy_ctl.u64 = csr_rd_node(xi.node, CVMX_GSERX_PHY_CTL(qlm));
			if (phy_ctl.s.phy_pd || phy_ctl.s.phy_reset)
				return -1;
			gserx_cfg.u64 = csr_rd_node(xi.node, CVMX_GSERX_CFG(qlm));
			if (gserx_cfg.s.bgx)
				return qlm;
			else
				return -1;
		} else if (xi.interface <= 7) {
			/* ILK: find a powered-up QLM in ILA mode */
			for (qlm = 4; qlm < 8; qlm++) {
				/* Make sure the QLM is powered up and out of reset */
				phy_ctl.u64 = csr_rd_node(xi.node, CVMX_GSERX_PHY_CTL(qlm));
				if (phy_ctl.s.phy_pd || phy_ctl.s.phy_reset)
					continue;

				gserx_cfg.u64 = csr_rd_node(xi.node, CVMX_GSERX_CFG(qlm));
				if (gserx_cfg.s.ila)
					return qlm;
			}
		}
		return -1;
	} else if (OCTEON_IS_MODEL(OCTEON_CN73XX)) {
		cvmx_gserx_phy_ctl_t phy_ctl;
		cvmx_gserx_cfg_t gserx_cfg;
		int qlm;

		/* Interface 0: QLM2, interface 1: QLM3, interface 2: DLM5/DLM6 */
		if (xi.interface < 2) {
			qlm = xi.interface + 2;

			/* Make sure the QLM is powered up and out of reset */
			phy_ctl.u64 = csr_rd(CVMX_GSERX_PHY_CTL(qlm));
			if (phy_ctl.s.phy_pd || phy_ctl.s.phy_reset)
				return -1;

			gserx_cfg.u64 = csr_rd(CVMX_GSERX_CFG(qlm));
			if (gserx_cfg.s.bgx)
				return qlm;
			else
				return -1;
		} else if (xi.interface == 2) {
			cvmx_gserx_cfg_t g1, g2;

			g1.u64 = csr_rd(CVMX_GSERX_CFG(5));
			g2.u64 = csr_rd(CVMX_GSERX_CFG(6));
			/* Check which DLMs feed BGX2 */
			if (g2.s.bgx) {
				if (g1.s.bgx) {
					cvmx_gserx_phy_ctl_t phy_ctl1;

					phy_ctl.u64 = csr_rd(CVMX_GSERX_PHY_CTL(5));
					phy_ctl1.u64 = csr_rd(CVMX_GSERX_PHY_CTL(6));
					if ((phy_ctl.s.phy_pd || phy_ctl.s.phy_reset) &&
					    (phy_ctl1.s.phy_pd || phy_ctl1.s.phy_reset))
						return -1;
					if (index >= 2)
						return 6;
					return 5;
				} else {
					/* Only DLM6 */
					phy_ctl.u64 = csr_rd(CVMX_GSERX_PHY_CTL(6));
					if (phy_ctl.s.phy_pd || phy_ctl.s.phy_reset)
						return -1;
					return 6;
				}
			} else if (g1.s.bgx) {
				/* Only DLM5 */
				phy_ctl.u64 = csr_rd(CVMX_GSERX_PHY_CTL(5));
				if (phy_ctl.s.phy_pd || phy_ctl.s.phy_reset)
					return -1;
				return 5;
			}
		}
		return -1;
	} else if (OCTEON_IS_MODEL(OCTEON_CNF75XX)) {
		cvmx_gserx_phy_ctl_t phy_ctl;
		cvmx_gserx_cfg_t gserx_cfg;
		int qlm;

		if (xi.interface == 0) {
			cvmx_gserx_cfg_t g1, g2;

			g1.u64 = csr_rd(CVMX_GSERX_CFG(4));
			g2.u64 = csr_rd(CVMX_GSERX_CFG(5));
			/* Check which DLMs feed BGX0 */
			if (g2.s.bgx) {
				if (g1.s.bgx) {
					cvmx_gserx_phy_ctl_t phy_ctl1;

					phy_ctl.u64 = csr_rd(CVMX_GSERX_PHY_CTL(4));
					phy_ctl1.u64 = csr_rd(CVMX_GSERX_PHY_CTL(5));
					if ((phy_ctl.s.phy_pd || phy_ctl.s.phy_reset) &&
					    (phy_ctl1.s.phy_pd || phy_ctl1.s.phy_reset))
						return -1;
					if (index >= 2)
						return 5;
					return 4;
				}

				/* Only DLM5 */
				phy_ctl.u64 = csr_rd(CVMX_GSERX_PHY_CTL(5));
				if (phy_ctl.s.phy_pd || phy_ctl.s.phy_reset)
					return -1;
				return 5;
			} else if (g1.s.bgx) {
				/* Only DLM4 */
				phy_ctl.u64 = csr_rd(CVMX_GSERX_PHY_CTL(4));
				if (phy_ctl.s.phy_pd || phy_ctl.s.phy_reset)
					return -1;
				return 4;
			}
		} else if (xi.interface < 2) {
			qlm = (xi.interface == 1) ? 2 : 3;
			gserx_cfg.u64 = csr_rd(CVMX_GSERX_CFG(qlm));
			if (gserx_cfg.s.srio)
				return qlm;
		}
		return -1;
	}
	return -1;
}

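/**
 * Report how a muxable BGX (BGX2 on CN73XX, BGX0 on CNF75XX) is wired
 * to its two DLMs
 *
 * @param bgx  BGX block to check
 *
 * Return: 0 if both DLMs are used, 1 if only the first DLM is used,
 *	   2 if only the second DLM is used, -1 on error
 */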
int cvmx_qlm_mux_interface(int bgx)
{
	int mux = 0;
	cvmx_gserx_cfg_t gser1, gser2;
	int qlm1, qlm2;

	/* Only BGX2 of CN73XX and BGX0 of CNF75XX can be muxed */
	if (OCTEON_IS_MODEL(OCTEON_CN73XX) && bgx != 2)
		return -1;
	else if (OCTEON_IS_MODEL(OCTEON_CNF75XX) && bgx != 0)
		return -1;

	if (OCTEON_IS_MODEL(OCTEON_CN73XX)) {
		qlm1 = 5;
		qlm2 = 6;
	} else if (OCTEON_IS_MODEL(OCTEON_CNF75XX)) {
		qlm1 = 4;
		qlm2 = 5;
	} else {
		return -1;
	}

	gser1.u64 = csr_rd(CVMX_GSERX_CFG(qlm1));
	gser2.u64 = csr_rd(CVMX_GSERX_CFG(qlm2));

	if (gser1.s.bgx && gser2.s.bgx)
		mux = 0;	/* Both DLMs feed the BGX */
	else if (gser1.s.bgx)
		mux = 1;	/* Only the lower DLM */
	else if (gser2.s.bgx)
		mux = 2;	/* Only the upper DLM */

	return mux;
}

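/**
 * Return the number of lanes in the QLM/DLM
 *
 * @param qlm  QLM to examine
 *
 * Return: number of lanes
 */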
int cvmx_qlm_get_lanes(int qlm)
{
	if (OCTEON_IS_MODEL(OCTEON_CN61XX) && qlm == 1)
		return 2;
	else if (OCTEON_IS_MODEL(OCTEON_CNF71XX))
		return 2;
	else if (OCTEON_IS_MODEL(OCTEON_CN73XX))
		return (qlm < 4) ? 4 : 2;
	else if (OCTEON_IS_MODEL(OCTEON_CNF75XX))
		return (qlm == 2 || qlm == 3) ? 4 : 2;
	return 4;
}

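/**
 * Look up the JTAG chain description for the current Octeon model
 *
 * Return: pointer to the field array, or NULL for unsupported models
 */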
const __cvmx_qlm_jtag_field_t *cvmx_qlm_jtag_get_field(void)
{
	/* Figure out which JTAG chain description we're using */
	if (OCTEON_IS_MODEL(OCTEON_CN68XX)) {
		return __cvmx_qlm_jtag_field_cn68xx;
	} else if (OCTEON_IS_MODEL(OCTEON_CN66XX) || OCTEON_IS_MODEL(OCTEON_CN61XX) ||
		   OCTEON_IS_MODEL(OCTEON_CNF71XX)) {
		return __cvmx_qlm_jtag_field_cn66xx;
	} else if (OCTEON_IS_MODEL(OCTEON_CN63XX)) {
		return __cvmx_qlm_jtag_field_cn63xx;
	}

	return NULL;
}

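/**
 * Compute the length of the QLM JTAG chain in bits
 *
 * Return: length of the chain
 */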
int cvmx_qlm_jtag_get_length(void)
{
	const __cvmx_qlm_jtag_field_t *qlm_ptr = cvmx_qlm_jtag_get_field();
	int length = 0;

	/* The highest stop bit of any field determines the chain length */
	while (qlm_ptr && qlm_ptr->name) {
		if (qlm_ptr->stop_bit > length)
			length = qlm_ptr->stop_bit + 1;
		qlm_ptr++;
	}
	return length;
}

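/**
 * Initialize the QLM layer
 */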
void cvmx_qlm_init(void)
{
	if (OCTEON_IS_OCTEON3())
		return;

	/* Nothing else is supported yet */
	printf("Please add support for unsupported Octeon SoC\n");
}

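/**
 * Look up a QLM JTAG chain field by name
 *
 * @param name  name of the field
 *
 * Return: pointer to the field, or NULL if it does not exist
 */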
static const __cvmx_qlm_jtag_field_t *__cvmx_qlm_lookup_field(const char *name)
{
	const __cvmx_qlm_jtag_field_t *ptr = cvmx_qlm_jtag_get_field();

	while (ptr->name) {
		if (strcmp(name, ptr->name) == 0)
			return ptr;
		ptr++;
	}

	debug("%s: Illegal field name %s\n", __func__, name);
	return NULL;
}

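/**
 * Get a field in a QLM JTAG chain
 *
 * @param qlm   QLM to get
 * @param lane  lane in the QLM to get
 * @param name  string name of the field
 *
 * Return: value of the field
 */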
uint64_t cvmx_qlm_jtag_get(int qlm, int lane, const char *name)
{
	const __cvmx_qlm_jtag_field_t *field = __cvmx_qlm_lookup_field(name);
	int qlm_jtag_length = cvmx_qlm_jtag_get_length();
	int num_lanes = cvmx_qlm_get_lanes(qlm);

	if (!field)
		return 0;

	/* Capture the current settings */
	cvmx_helper_qlm_jtag_capture(qlm);

	/*
	 * Shift past the lanes we are not interested in. The highest lane
	 * shifts out first, so skip (num_lanes - 1 - lane) whole lanes and
	 * then shift to the start bit of the field.
	 */
	cvmx_helper_qlm_jtag_shift_zeros(qlm,
					 qlm_jtag_length * (num_lanes - 1 - lane));
	cvmx_helper_qlm_jtag_shift_zeros(qlm, field->start_bit);

	/* Shift out the field and return its value */
	return cvmx_helper_qlm_jtag_shift(qlm, field->stop_bit - field->start_bit + 1, 0);
}

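/**
 * Set a field in a QLM JTAG chain
 *
 * @param qlm    QLM to set
 * @param lane   lane in the QLM to set, or -1 for all lanes
 * @param name   string name of the field
 * @param value  new value of the field
 */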
void cvmx_qlm_jtag_set(int qlm, int lane, const char *name, uint64_t value)
{
	int i, l;
	u32 shift_values[CVMX_QLM_JTAG_UINT32];
	int num_lanes = cvmx_qlm_get_lanes(qlm);
	const __cvmx_qlm_jtag_field_t *field = __cvmx_qlm_lookup_field(name);
	int qlm_jtag_length = cvmx_qlm_jtag_get_length();
	int total_length = qlm_jtag_length * num_lanes;
	int bits = 0;

	if (!field)
		return;

	/* Capture the current settings */
	cvmx_helper_qlm_jtag_capture(qlm);
	for (i = 0; i < CVMX_QLM_JTAG_UINT32; i++)
		shift_values[i] = cvmx_helper_qlm_jtag_shift(qlm, 32, 0);

	/* Put the new field value into our local copy of the chain */
	for (l = 0; l < num_lanes; l++) {
		u64 new_value = value;
		int bit;
		int adj_lanes;

		if (l != lane && lane != -1)
			continue;

		adj_lanes = (num_lanes - 1 - l) * qlm_jtag_length;

		for (bit = field->start_bit + adj_lanes; bit <= field->stop_bit + adj_lanes;
		     bit++) {
			if (new_value & 1)
				shift_values[bit / 32] |= 1U << (bit & 31);
			else
				shift_values[bit / 32] &= ~(1U << (bit & 31));
			new_value >>= 1;
		}
	}

	/* Shift out the data, xor-ed with the reference */
	while (bits < total_length) {
		u32 shift = shift_values[bits / 32] ^ __cvmx_qlm_jtag_xor_ref[qlm][bits / 32];
		int width = total_length - bits;

		if (width > 32)
			width = 32;
		cvmx_helper_qlm_jtag_shift(qlm, width, shift);
		bits += 32;
	}

	/* Update the new data */
	cvmx_helper_qlm_jtag_update(qlm);

	/*
	 * Always give the QLM 1ms to settle after every update. This may
	 * not always be needed, but some of the options make significant
	 * electrical changes.
	 */
	udelay(1000);
}

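/**
 * Apply model-specific QLM speed tweaks, including the workaround for
 * errata G-16467 on CN68XX pass 2.x parts running at 6.25 Gbaud.
 */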
void __cvmx_qlm_speed_tweak(void)
{
	cvmx_mio_qlmx_cfg_t qlm_cfg;
	int num_qlms = cvmx_qlm_get_num();
	int qlm;

	/* Workaround for Errata G-16467 */
	if (OCTEON_IS_MODEL(OCTEON_CN68XX_PASS2_X)) {
		for (qlm = 0; qlm < num_qlms; qlm++) {
			int ir50dac;

			/* This workaround only applies at 6.25 Gbaud */
			if (cvmx_qlm_get_gbaud_mhz(qlm) == 6250) {
#ifdef CVMX_QLM_DUMP_STATE
				debug("%s:%d: QLM%d: Applying workaround for Errata G-16467\n",
				      __func__, __LINE__, qlm);
				cvmx_qlm_display_registers(qlm);
				debug("\n");
#endif
				cvmx_qlm_jtag_set(qlm, -1, "cfg_cdr_trunc", 0);
				/* Hold the QLM in reset */
				cvmx_qlm_jtag_set(qlm, -1, "cfg_rst_n_set", 0);
				cvmx_qlm_jtag_set(qlm, -1, "cfg_rst_n_clr", 1);
				/* Force TX to be idle */
				cvmx_qlm_jtag_set(qlm, -1, "cfg_tx_idle_clr", 0);
				cvmx_qlm_jtag_set(qlm, -1, "cfg_tx_idle_set", 1);
				if (OCTEON_IS_MODEL(OCTEON_CN68XX_PASS2_0)) {
					/* Ramp ir50dac up to its maximum of 31 */
					ir50dac = cvmx_qlm_jtag_get(qlm, 0, "ir50dac");
					while (++ir50dac <= 31)
						cvmx_qlm_jtag_set(qlm, -1, "ir50dac", ir50dac);
				}
				cvmx_qlm_jtag_set(qlm, -1, "div4_byp", 0);
				cvmx_qlm_jtag_set(qlm, -1, "clkf_byp", 16);
				cvmx_qlm_jtag_set(qlm, -1, "serdes_pll_byp", 1);
				cvmx_qlm_jtag_set(qlm, -1, "spdsel_byp", 1);
#ifdef CVMX_QLM_DUMP_STATE
				debug("%s:%d: QLM%d: Done applying workaround for Errata G-16467\n",
				      __func__, __LINE__, qlm);
				cvmx_qlm_display_registers(qlm);
				debug("\n\n");
#endif
			}
		}
	} else if (OCTEON_IS_MODEL(OCTEON_CN68XX_PASS1_X) ||
		   OCTEON_IS_MODEL(OCTEON_CN66XX_PASS1_X)) {
		/* Loop through the QLMs */
		for (qlm = 0; qlm < num_qlms; qlm++) {
			/* Read the QLM speed */
			qlm_cfg.u64 = csr_rd(CVMX_MIO_QLMX_CFG(qlm));

			/* At 5 or 6.25 Gbaud, update the Gen2 RX settings */
			if (qlm_cfg.s.qlm_spd == 5 || qlm_cfg.s.qlm_spd == 12 ||
			    qlm_cfg.s.qlm_spd == 0 || qlm_cfg.s.qlm_spd == 6 ||
			    qlm_cfg.s.qlm_spd == 11) {
				cvmx_qlm_jtag_set(qlm, -1, "rx_cap_gen2", 0x1);
				cvmx_qlm_jtag_set(qlm, -1, "rx_eq_gen2", 0x8);
			}
		}
	}
}

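/**
 * Program the "idle_dac" JTAG field on every QLM of CN68XX pass 1.x,
 * CN66XX pass 1.x and CN63XX parts.
 */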
void __cvmx_qlm_pcie_idle_dac_tweak(void)
{
	int num_qlms = 0;
	int qlm;

	if (OCTEON_IS_MODEL(OCTEON_CN68XX_PASS1_X))
		num_qlms = 5;
	else if (OCTEON_IS_MODEL(OCTEON_CN66XX_PASS1_X))
		num_qlms = 3;
	else if (OCTEON_IS_MODEL(OCTEON_CN63XX))
		num_qlms = 3;
	else
		return;

	for (qlm = 0; qlm < num_qlms; qlm++)
		cvmx_qlm_jtag_set(qlm, -1, "idle_dac", 0x2);
}

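/**
 * Set the "cfg_rxd_set" JTAG field on a QLM lane of Octeon II parts
 *
 * @param qlm   QLM to set
 * @param lane  lane to set, or -1 for all lanes
 */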
void __cvmx_qlm_pcie_cfg_rxd_set_tweak(int qlm, int lane)
{
	if (OCTEON_IS_MODEL(OCTEON_CN6XXX) || OCTEON_IS_MODEL(OCTEON_CNF71XX))
		cvmx_qlm_jtag_set(qlm, lane, "cfg_rxd_set", 0x1);
}

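/**
 * Get the speed (baud rate) of a QLM on a specific node
 *
 * @param node  node of the QLM
 * @param qlm   QLM to examine
 *
 * Return: speed in Mbaud (e.g. 10312 = 10.3125 Gbaud), 0 if unknown,
 *	   -1 on error
 */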
int cvmx_qlm_get_gbaud_mhz_node(int node, int qlm)
{
	cvmx_gserx_lane_mode_t lane_mode;
	cvmx_gserx_cfg_t cfg;

	if (!octeon_has_feature(OCTEON_FEATURE_MULTINODE))
		return 0;

	if (qlm >= 8)
		return -1;

	/* Check if the QLM is configured at all */
	cfg.u64 = csr_rd_node(node, CVMX_GSERX_CFG(qlm));
	if (cfg.u64 == 0)
		return -1;
	if (cfg.s.pcie) {
		int pem = 0;
		cvmx_pemx_cfg_t pemx_cfg;

		/* Work out which PEM the QLM's lanes belong to */
		switch (qlm) {
		case 0: /* PEM0 */
			pem = 0;
			break;
		case 1: /* PEM0 or PEM1 */
			pemx_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(0));
			if (pemx_cfg.cn78xx.lanes8)
				pem = 0;
			else
				pem = 1;
			break;
		case 2: /* PEM2 */
			pem = 2;
			break;
		case 3: /* PEM2 or PEM3 */
			pemx_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(2));
			if (pemx_cfg.cn78xx.lanes8) {
				pem = 2;
			} else {
				pemx_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(3));
				if (pemx_cfg.cn78xx.lanes8)
					pem = 3;
				else
					pem = 2;
			}
			break;
		case 4: /* PEM3 */
			pem = 3;
			break;
		default:
			debug("QLM%d: Should be in PCIe mode\n", qlm);
			break;
		}
		pemx_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(pem));
		switch (pemx_cfg.s.md) {
		case 0: /* Gen1 */
			return 2500;
		case 1: /* Gen2 */
			return 5000;
		case 2: /* Gen3 */
			return 8000;
		default:
			return 0;
		}
	} else {
		lane_mode.u64 = csr_rd_node(node, CVMX_GSERX_LANE_MODE(qlm));
		switch (lane_mode.s.lmode) {
		case R_25G_REFCLK100:
			return 2500;
		case R_5G_REFCLK100:
			return 5000;
		case R_8G_REFCLK100:
			return 8000;
		case R_125G_REFCLK15625_KX:
			return 1250;
		case R_3125G_REFCLK15625_XAUI:
			return 3125;
		case R_103125G_REFCLK15625_KR:
			return 10312;
		case R_125G_REFCLK15625_SGMII:
			return 1250;
		case R_5G_REFCLK15625_QSGMII:
			return 5000;
		case R_625G_REFCLK15625_RXAUI:
			return 6250;
		case R_25G_REFCLK125:
			return 2500;
		case R_5G_REFCLK125:
			return 5000;
		case R_8G_REFCLK125:
			return 8000;
		default:
			return 0;
		}
	}
}

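/**
 * Get the speed (baud rate) of the QLM
 *
 * @param qlm  QLM to examine
 *
 * Return: speed in Mbaud (e.g. 6250 = 6.25 Gbaud), 0 if unknown
 */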
int cvmx_qlm_get_gbaud_mhz(int qlm)
{
	if (OCTEON_IS_MODEL(OCTEON_CN63XX)) {
		if (qlm == 2) {
			cvmx_gmxx_inf_mode_t inf_mode;

			inf_mode.u64 = csr_rd(CVMX_GMXX_INF_MODE(0));
			/* Same encoding as the MIO_QLMX_CFG[QLM_SPD] table below */
			switch (inf_mode.s.speed) {
			case 0:
				return 5000;
			case 1:
				return 2500;
			case 2:
				return 2500;
			case 3:
				return 1250;
			case 4:
				return 1250;
			case 5:
				return 6250;
			case 6:
				return 5000;
			case 7:
				return 2500;
			case 8:
				return 3125;
			case 9:
				return 2500;
			case 10:
				return 1250;
			case 11:
				return 5000;
			case 12:
				return 6250;
			case 13:
				return 3750;
			case 14:
				return 3125;
			default:
				return 0;
			}
		} else {
			cvmx_sriox_status_reg_t status_reg;

			/* The QLM is either in SRIO or PCIe mode */
			status_reg.u64 = csr_rd(CVMX_SRIOX_STATUS_REG(qlm));
			if (status_reg.s.srio) {
				cvmx_sriomaintx_port_0_ctl2_t sriomaintx_port_0_ctl2;

				sriomaintx_port_0_ctl2.u32 =
					csr_rd(CVMX_SRIOMAINTX_PORT_0_CTL2(qlm));
				switch (sriomaintx_port_0_ctl2.s.sel_baud) {
				case 1:
					return 1250;
				case 2:
					return 2500;
				case 3:
					return 3125;
				case 4:
					return 5000;
				case 5:
					return 6250;
				default:
					return 0;
				}
			} else {
				cvmx_pciercx_cfg032_t pciercx_cfg032;

				pciercx_cfg032.u32 = csr_rd(CVMX_PCIERCX_CFG032(qlm));
				switch (pciercx_cfg032.s.ls) {
				case 1:
					return 2500;
				case 2:
					return 5000;
				case 4:
					return 8000;
				default: {
					cvmx_mio_rst_boot_t mio_rst_boot;

					mio_rst_boot.u64 = csr_rd(CVMX_MIO_RST_BOOT);
					if (qlm == 0 && mio_rst_boot.s.qlm0_spd == 0xf)
						return 0;

					if (qlm == 1 && mio_rst_boot.s.qlm1_spd == 0xf)
						return 0;

					/* Best guess we can make */
					return 5000;
				}
				}
			}
		}
	} else if (OCTEON_IS_OCTEON2()) {
		cvmx_mio_qlmx_cfg_t qlm_cfg;

		qlm_cfg.u64 = csr_rd(CVMX_MIO_QLMX_CFG(qlm));
		switch (qlm_cfg.s.qlm_spd) {
		case 0:
			return 5000;	/* 5     Gbaud */
		case 1:
			return 2500;	/* 2.5   Gbaud */
		case 2:
			return 2500;	/* 2.5   Gbaud */
		case 3:
			return 1250;	/* 1.25  Gbaud */
		case 4:
			return 1250;	/* 1.25  Gbaud */
		case 5:
			return 6250;	/* 6.25  Gbaud */
		case 6:
			return 5000;	/* 5     Gbaud */
		case 7:
			return 2500;	/* 2.5   Gbaud */
		case 8:
			return 3125;	/* 3.125 Gbaud */
		case 9:
			return 2500;	/* 2.5   Gbaud */
		case 10:
			return 1250;	/* 1.25  Gbaud */
		case 11:
			return 5000;	/* 5     Gbaud */
		case 12:
			return 6250;	/* 6.25  Gbaud */
		case 13:
			return 3750;	/* 3.75  Gbaud */
		case 14:
			return 3125;	/* 3.125 Gbaud */
		default:
			return 0;	/* Disabled */
		}
	} else if (OCTEON_IS_MODEL(OCTEON_CN70XX)) {
		cvmx_gserx_dlmx_mpll_multiplier_t mpll_multiplier;
		u64 meas_refclock;
		u64 freq;

		/* Measure the reference clock */
		meas_refclock = cvmx_qlm_measure_clock(qlm);
		/* Multiply by the PLL multiplier to get the line rate */
		mpll_multiplier.u64 = csr_rd(CVMX_GSERX_DLMX_MPLL_MULTIPLIER(qlm, 0));
		freq = meas_refclock * mpll_multiplier.s.mpll_multiplier;
		/* Round to the nearest MHz */
		freq = (freq + 500000) / 1000000;

		return freq;
	} else if (OCTEON_IS_MODEL(OCTEON_CN78XX)) {
		return cvmx_qlm_get_gbaud_mhz_node(cvmx_get_node_num(), qlm);
	} else if (OCTEON_IS_MODEL(OCTEON_CN73XX) || OCTEON_IS_MODEL(OCTEON_CNF75XX)) {
		cvmx_gserx_lane_mode_t lane_mode;

		lane_mode.u64 = csr_rd(CVMX_GSERX_LANE_MODE(qlm));
		switch (lane_mode.s.lmode) {
		case R_25G_REFCLK100:
			return 2500;
		case R_5G_REFCLK100:
			return 5000;
		case R_8G_REFCLK100:
			return 8000;
		case R_125G_REFCLK15625_KX:
			return 1250;
		case R_3125G_REFCLK15625_XAUI:
			return 3125;
		case R_103125G_REFCLK15625_KR:
			return 10312;
		case R_125G_REFCLK15625_SGMII:
			return 1250;
		case R_5G_REFCLK15625_QSGMII:
			return 5000;
		case R_625G_REFCLK15625_RXAUI:
			return 6250;
		case R_25G_REFCLK125:
			return 2500;
		case R_5G_REFCLK125:
			return 5000;
		case R_8G_REFCLK125:
			return 8000;
		default:
			return 0;
		}
	}
	return 0;
}

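/**
 * Decode the mode of a CN70XX DLM from the GMX, PEM and SATA
 * configuration registers.
 */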
static enum cvmx_qlm_mode __cvmx_qlm_get_mode_cn70xx(int qlm)
{
	switch (qlm) {
	case 0: /* DLM0: SGMII/QSGMII/RXAUI */
	{
		union cvmx_gmxx_inf_mode inf_mode0, inf_mode1;

		inf_mode0.u64 = csr_rd(CVMX_GMXX_INF_MODE(0));
		inf_mode1.u64 = csr_rd(CVMX_GMXX_INF_MODE(1));

		switch (inf_mode0.s.mode) {
		case CVMX_GMX_INF_MODE_SGMII:
			switch (inf_mode1.s.mode) {
			case CVMX_GMX_INF_MODE_SGMII:
				return CVMX_QLM_MODE_SGMII_SGMII;
			case CVMX_GMX_INF_MODE_QSGMII:
				return CVMX_QLM_MODE_SGMII_QSGMII;
			default:
				return CVMX_QLM_MODE_SGMII_DISABLED;
			}
		case CVMX_GMX_INF_MODE_QSGMII:
			switch (inf_mode1.s.mode) {
			case CVMX_GMX_INF_MODE_SGMII:
				return CVMX_QLM_MODE_QSGMII_SGMII;
			case CVMX_GMX_INF_MODE_QSGMII:
				return CVMX_QLM_MODE_QSGMII_QSGMII;
			default:
				return CVMX_QLM_MODE_QSGMII_DISABLED;
			}
		case CVMX_GMX_INF_MODE_RXAUI:
			return CVMX_QLM_MODE_RXAUI_1X2;
		default:
			switch (inf_mode1.s.mode) {
			case CVMX_GMX_INF_MODE_SGMII:
				return CVMX_QLM_MODE_DISABLED_SGMII;
			case CVMX_GMX_INF_MODE_QSGMII:
				return CVMX_QLM_MODE_DISABLED_QSGMII;
			default:
				return CVMX_QLM_MODE_DISABLED;
			}
		}
	}
	case 1: /* DLM1: PCIe (PEM0/PEM1) */
	{
		union cvmx_gserx_sata_cfg sata_cfg;
		union cvmx_pemx_cfg pem0_cfg;

		sata_cfg.u64 = csr_rd(CVMX_GSERX_SATA_CFG(0));
		pem0_cfg.u64 = csr_rd(CVMX_PEMX_CFG(0));

		switch (pem0_cfg.cn70xx.md) {
		case CVMX_PEM_MD_GEN2_2LANE:
		case CVMX_PEM_MD_GEN1_2LANE:
			return CVMX_QLM_MODE_PCIE_1X2;
		case CVMX_PEM_MD_GEN2_1LANE:
		case CVMX_PEM_MD_GEN1_1LANE:
			if (sata_cfg.s.sata_en)
				/* Both PEM0 and PEM1 */
				return CVMX_QLM_MODE_PCIE_2X1;

			/* Only PEM0 */
			return CVMX_QLM_MODE_PCIE_1X1;
		case CVMX_PEM_MD_GEN2_4LANE:
		case CVMX_PEM_MD_GEN1_4LANE:
			return CVMX_QLM_MODE_PCIE;
		default:
			return CVMX_QLM_MODE_DISABLED;
		}
	}
	case 2: /* DLM2: SATA or PCIe */
	{
		union cvmx_gserx_sata_cfg sata_cfg;
		union cvmx_pemx_cfg pem0_cfg, pem1_cfg, pem2_cfg;

		sata_cfg.u64 = csr_rd(CVMX_GSERX_SATA_CFG(0));
		pem0_cfg.u64 = csr_rd(CVMX_PEMX_CFG(0));
		pem1_cfg.u64 = csr_rd(CVMX_PEMX_CFG(1));
		pem2_cfg.u64 = csr_rd(CVMX_PEMX_CFG(2));

		if (sata_cfg.s.sata_en)
			return CVMX_QLM_MODE_SATA_2X1;
		if (pem0_cfg.cn70xx.md == CVMX_PEM_MD_GEN2_4LANE ||
		    pem0_cfg.cn70xx.md == CVMX_PEM_MD_GEN1_4LANE)
			return CVMX_QLM_MODE_PCIE;
		if (pem1_cfg.cn70xx.md == CVMX_PEM_MD_GEN2_2LANE ||
		    pem1_cfg.cn70xx.md == CVMX_PEM_MD_GEN1_2LANE) {
			return CVMX_QLM_MODE_PCIE_1X2;
		}
		if (pem1_cfg.cn70xx.md == CVMX_PEM_MD_GEN2_1LANE ||
		    pem1_cfg.cn70xx.md == CVMX_PEM_MD_GEN1_1LANE) {
			if (pem2_cfg.cn70xx.md == CVMX_PEM_MD_GEN2_1LANE ||
			    pem2_cfg.cn70xx.md == CVMX_PEM_MD_GEN1_1LANE) {
				return CVMX_QLM_MODE_PCIE_2X1;
			} else {
				return CVMX_QLM_MODE_PCIE_1X1;
			}
		}
		if (pem2_cfg.cn70xx.md == CVMX_PEM_MD_GEN2_1LANE ||
		    pem2_cfg.cn70xx.md == CVMX_PEM_MD_GEN1_1LANE)
			return CVMX_QLM_MODE_PCIE_2X1;
		return CVMX_QLM_MODE_DISABLED;
	}
	default:
		return CVMX_QLM_MODE_DISABLED;
	}

	return CVMX_QLM_MODE_DISABLED;
}

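/**
 * Get the DLM mode for an interface on CN70XX
 *
 * @param interface_type  0 = GMX (SGMII/QSGMII/RXAUI), 1 = PCIe, 2 = SATA
 * @param interface       interface number within that type
 *
 * Return: the mode of the DLM feeding the interface
 */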
enum cvmx_qlm_mode cvmx_qlm_get_dlm_mode(int interface_type, int interface)
{
	switch (interface_type) {
	case 0: /* GMX: interfaces 0 and 1 share DLM0 */
	{
		enum cvmx_qlm_mode qlm_mode = __cvmx_qlm_get_mode_cn70xx(0);

		switch (interface) {
		case 0:
			switch (qlm_mode) {
			case CVMX_QLM_MODE_SGMII_SGMII:
			case CVMX_QLM_MODE_SGMII_DISABLED:
			case CVMX_QLM_MODE_SGMII_QSGMII:
				return CVMX_QLM_MODE_SGMII;
			case CVMX_QLM_MODE_QSGMII_QSGMII:
			case CVMX_QLM_MODE_QSGMII_DISABLED:
			case CVMX_QLM_MODE_QSGMII_SGMII:
				return CVMX_QLM_MODE_QSGMII;
			case CVMX_QLM_MODE_RXAUI_1X2:
				return CVMX_QLM_MODE_RXAUI;
			default:
				return CVMX_QLM_MODE_DISABLED;
			}
		case 1:
			switch (qlm_mode) {
			case CVMX_QLM_MODE_SGMII_SGMII:
			case CVMX_QLM_MODE_DISABLED_SGMII:
			case CVMX_QLM_MODE_QSGMII_SGMII:
				return CVMX_QLM_MODE_SGMII;
			case CVMX_QLM_MODE_QSGMII_QSGMII:
			case CVMX_QLM_MODE_DISABLED_QSGMII:
			case CVMX_QLM_MODE_SGMII_QSGMII:
				return CVMX_QLM_MODE_QSGMII;
			default:
				return CVMX_QLM_MODE_DISABLED;
			}
		default:
			return qlm_mode;
		}
	}
	case 1: /* PCIe: PEM0 on DLM1, PEM1/PEM2 on DLM2 */
	{
		enum cvmx_qlm_mode qlm_mode1 = __cvmx_qlm_get_mode_cn70xx(1);
		enum cvmx_qlm_mode qlm_mode2 = __cvmx_qlm_get_mode_cn70xx(2);

		switch (interface) {
		case 0: /* PEM0 */
			return qlm_mode1;
		case 1: /* PEM1 */
			/*
			 * PEM1 is either the second lane of DLM1
			 * (PCIE_2X1) or the first lane(s) of DLM2.
			 */
			if (qlm_mode1 == CVMX_QLM_MODE_PCIE_2X1)
				return CVMX_QLM_MODE_PCIE_2X1;
			else if (qlm_mode2 == CVMX_QLM_MODE_PCIE_1X2 ||
				 qlm_mode2 == CVMX_QLM_MODE_PCIE_2X1)
				return qlm_mode2;
			else
				return CVMX_QLM_MODE_DISABLED;
		case 2: /* PEM2: second lane of DLM2 */
			if (qlm_mode2 == CVMX_QLM_MODE_PCIE_2X1)
				return qlm_mode2;
			else
				return CVMX_QLM_MODE_DISABLED;
		default:
			return CVMX_QLM_MODE_DISABLED;
		}
	}
	case 2: /* SATA: DLM2 */
	{
		enum cvmx_qlm_mode qlm_mode = __cvmx_qlm_get_mode_cn70xx(2);

		if (qlm_mode == CVMX_QLM_MODE_SATA_2X1)
			return CVMX_QLM_MODE_SATA_2X1;
		else
			return CVMX_QLM_MODE_DISABLED;
	}
	default:
		return CVMX_QLM_MODE_DISABLED;
	}
}

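/**
 * Decode the mode of a QLM on Octeon II (CN6XXX) parts.
 */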
static enum cvmx_qlm_mode __cvmx_qlm_get_mode_cn6xxx(int qlm)
{
	cvmx_mio_qlmx_cfg_t qlmx_cfg;

	if (OCTEON_IS_MODEL(OCTEON_CN68XX)) {
		qlmx_cfg.u64 = csr_rd(CVMX_MIO_QLMX_CFG(qlm));
		/* QLM is disabled when QLM SPD is 15 */
		if (qlmx_cfg.s.qlm_spd == 15)
			return CVMX_QLM_MODE_DISABLED;

		switch (qlmx_cfg.s.qlm_cfg) {
		case 0:
			return CVMX_QLM_MODE_PCIE;
		case 1:
			return CVMX_QLM_MODE_ILK;
		case 2:
			return CVMX_QLM_MODE_SGMII;
		case 3:
			return CVMX_QLM_MODE_XAUI;
		case 7:
			return CVMX_QLM_MODE_RXAUI;
		default:
			return CVMX_QLM_MODE_DISABLED;
		}
	} else if (OCTEON_IS_MODEL(OCTEON_CN66XX)) {
		qlmx_cfg.u64 = csr_rd(CVMX_MIO_QLMX_CFG(qlm));
		/* QLM is disabled when QLM SPD is 15 */
		if (qlmx_cfg.s.qlm_spd == 15)
			return CVMX_QLM_MODE_DISABLED;

		switch (qlmx_cfg.s.qlm_cfg) {
		case 0x9: /* SGMII */
			return CVMX_QLM_MODE_SGMII;
		case 0xb: /* XAUI */
			return CVMX_QLM_MODE_XAUI;
		case 0x0: /* PCIe */
		case 0x8:
		case 0x2:
		case 0xa:
			return CVMX_QLM_MODE_PCIE;
		case 0x1: /* SRIO 1x4 */
		case 0x3:
			return CVMX_QLM_MODE_SRIO_1X4;
		case 0x4: /* SRIO 2x2 */
		case 0x6:
			return CVMX_QLM_MODE_SRIO_2X2;
		case 0x5: /* SRIO 4x1 */
		case 0x7:
			if (!OCTEON_IS_MODEL(OCTEON_CN66XX_PASS1_0))
				return CVMX_QLM_MODE_SRIO_4X1;
			fallthrough;
		default:
			return CVMX_QLM_MODE_DISABLED;
		}
	} else if (OCTEON_IS_MODEL(OCTEON_CN63XX)) {
		cvmx_sriox_status_reg_t status_reg;

		if (qlm == 2) {
			cvmx_gmxx_inf_mode_t inf_mode;

			inf_mode.u64 = csr_rd(CVMX_GMXX_INF_MODE(0));
			if (inf_mode.s.speed == 15)
				return CVMX_QLM_MODE_DISABLED;
			else if (inf_mode.s.mode == 0)
				return CVMX_QLM_MODE_SGMII;
			else
				return CVMX_QLM_MODE_XAUI;
		}
		status_reg.u64 = csr_rd(CVMX_SRIOX_STATUS_REG(qlm));
		if (status_reg.s.srio)
			return CVMX_QLM_MODE_SRIO_1X4;
		else
			return CVMX_QLM_MODE_PCIE;
	} else if (OCTEON_IS_MODEL(OCTEON_CN61XX)) {
		qlmx_cfg.u64 = csr_rd(CVMX_MIO_QLMX_CFG(qlm));
		/* QLM is disabled when QLM SPD is 15 */
		if (qlmx_cfg.s.qlm_spd == 15)
			return CVMX_QLM_MODE_DISABLED;

		switch (qlm) {
		case 0:
			switch (qlmx_cfg.s.qlm_cfg) {
			case 0:
				return CVMX_QLM_MODE_PCIE;
			case 2:
				return CVMX_QLM_MODE_SGMII;
			case 3:
				return CVMX_QLM_MODE_XAUI;
			default:
				return CVMX_QLM_MODE_DISABLED;
			}
			break;
		case 1:
			switch (qlmx_cfg.s.qlm_cfg) {
			case 0:
				return CVMX_QLM_MODE_PCIE_1X2;
			case 1:
				return CVMX_QLM_MODE_PCIE_2X1;
			default:
				return CVMX_QLM_MODE_DISABLED;
			}
			break;
		case 2:
			switch (qlmx_cfg.s.qlm_cfg) {
			case 2:
				return CVMX_QLM_MODE_SGMII;
			case 3:
				return CVMX_QLM_MODE_XAUI;
			default:
				return CVMX_QLM_MODE_DISABLED;
			}
			break;
		}
	} else if (OCTEON_IS_MODEL(OCTEON_CNF71XX)) {
		qlmx_cfg.u64 = csr_rd(CVMX_MIO_QLMX_CFG(qlm));
		/* QLM is disabled when QLM SPD is 15 */
		if (qlmx_cfg.s.qlm_spd == 15)
			return CVMX_QLM_MODE_DISABLED;

		switch (qlm) {
		case 0:
			if (qlmx_cfg.s.qlm_cfg == 2)
				return CVMX_QLM_MODE_SGMII;
			break;
		case 1:
			switch (qlmx_cfg.s.qlm_cfg) {
			case 0:
				return CVMX_QLM_MODE_PCIE_1X2;
			case 1:
				return CVMX_QLM_MODE_PCIE_2X1;
			default:
				return CVMX_QLM_MODE_DISABLED;
			}
			break;
		}
	}
	return CVMX_QLM_MODE_DISABLED;
}

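/**
 * Set the DLM PLL multiplier on CN70XX by stepping the current
 * multiplier down to the value needed for the requested baud rate.
 *
 * @param qlm             DLM to configure
 * @param baud_mhz        desired speed in Mbaud
 * @param old_multiplier  multiplier currently programmed
 */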
void __cvmx_qlm_set_mult(int qlm, int baud_mhz, int old_multiplier)
{
	cvmx_gserx_dlmx_mpll_multiplier_t mpll_multiplier;
	cvmx_gserx_dlmx_ref_clkdiv2_t clkdiv;
	u64 meas_refclock, mult;

	if (!OCTEON_IS_MODEL(OCTEON_CN70XX))
		return;

	if (qlm == -1)
		return;

	meas_refclock = cvmx_qlm_measure_clock(qlm);
	if (meas_refclock == 0) {
		printf("DLM%d: Reference clock not running\n", qlm);
		return;
	}

	/*
	 * If DLM0's reference clock is divided by 2, double the target
	 * rate to compensate.
	 */
	if (qlm == 0) {
		clkdiv.u64 = csr_rd(CVMX_GSERX_DLMX_REF_CLKDIV2(qlm, 0));
		if (clkdiv.s.ref_clkdiv2)
			baud_mhz *= 2;
	}
	/* Round to the nearest multiplier of the measured clock */
	mult = (uint64_t)baud_mhz * 1000000 + (meas_refclock / 2);
	mult /= meas_refclock;

	/*
	 * Step the multiplier down one at a time, holding each value
	 * for at least 1 ms, until the target multiplier is reached.
	 */
	do {
		mpll_multiplier.u64 = csr_rd(CVMX_GSERX_DLMX_MPLL_MULTIPLIER(qlm, 0));
		mpll_multiplier.s.mpll_multiplier = --old_multiplier;
		csr_wr(CVMX_GSERX_DLMX_MPLL_MULTIPLIER(qlm, 0), mpll_multiplier.u64);
		/* Wait for the multiplier to settle */
		udelay(1000);
	} while (old_multiplier > (int)mult);
}

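/**
 * Get the mode of a QLM on a CN78XX node
 *
 * @param node  node the QLM is on
 * @param qlm   QLM to examine
 *
 * Return: the mode of the QLM
 */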
enum cvmx_qlm_mode cvmx_qlm_get_mode_cn78xx(int node, int qlm)
{
	cvmx_gserx_cfg_t gserx_cfg;
	static int qlm_mode[2][9] = { { -1, -1, -1, -1, -1, -1, -1, -1, -1 },
				      { -1, -1, -1, -1, -1, -1, -1, -1, -1 } };

	if (qlm >= 8)
		return CVMX_QLM_MODE_OCI;

	if (qlm_mode[node][qlm] != -1)
		return qlm_mode[node][qlm];

	gserx_cfg.u64 = csr_rd_node(node, CVMX_GSERX_CFG(qlm));
	if (gserx_cfg.s.pcie) {
		switch (qlm) {
		case 0: /* PEM0 x4 or first half of PEM0 x8 */
		case 1: /* PEM1 x4 or second half of PEM0 x8 */
		{
			cvmx_pemx_cfg_t pemx_cfg;

			pemx_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(0));
			if (pemx_cfg.cn78xx.lanes8) {
				/* PEM0 x8 */
				qlm_mode[node][qlm] = CVMX_QLM_MODE_PCIE_1X8;
			} else {
				/* PEM0/PEM1 x4 */
				qlm_mode[node][qlm] = CVMX_QLM_MODE_PCIE;
			}
			break;
		}
		case 2: /* PEM2 x4 or first half of PEM2 x8 */
		{
			cvmx_pemx_cfg_t pemx_cfg;

			pemx_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(2));
			if (pemx_cfg.cn78xx.lanes8) {
				/* PEM2 x8 */
				qlm_mode[node][qlm] = CVMX_QLM_MODE_PCIE_1X8;
			} else {
				/* PEM2 x4 */
				qlm_mode[node][qlm] = CVMX_QLM_MODE_PCIE;
			}
			break;
		}
		case 3: /* Second half of PEM2 x8 or PEM3 */
		{
			cvmx_pemx_cfg_t pemx_cfg;

			pemx_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(2));
			if (pemx_cfg.cn78xx.lanes8) {
				/* Second half of PEM2 x8 */
				qlm_mode[node][qlm] = CVMX_QLM_MODE_PCIE_1X8;
				break;
			}

			/* Not part of PEM2, so this is PEM3 */
			pemx_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(3));
			if (pemx_cfg.cn78xx.lanes8) {
				/* First half of PEM3 x8 */
				qlm_mode[node][qlm] = CVMX_QLM_MODE_PCIE_1X8;
			} else {
				/* PEM3 x4 */
				qlm_mode[node][qlm] = CVMX_QLM_MODE_PCIE;
			}
			break;
		}
		case 4: /* Second half of PEM3 x8 or PEM3 x4 */
		{
			cvmx_pemx_cfg_t pemx_cfg;

			pemx_cfg.u64 = csr_rd_node(node, CVMX_PEMX_CFG(3));
			if (pemx_cfg.cn78xx.lanes8) {
				/* Second half of PEM3 x8 */
				qlm_mode[node][qlm] = CVMX_QLM_MODE_PCIE_1X8;
			} else {
				/* PEM3 x4 */
				qlm_mode[node][qlm] = CVMX_QLM_MODE_PCIE;
			}
			break;
		}
		default:
			qlm_mode[node][qlm] = CVMX_QLM_MODE_DISABLED;
			break;
		}
	} else if (gserx_cfg.s.ila) {
		qlm_mode[node][qlm] = CVMX_QLM_MODE_ILK;
	} else if (gserx_cfg.s.bgx) {
		cvmx_bgxx_cmrx_config_t cmr_config;
		cvmx_bgxx_spux_br_pmd_control_t pmd_control;
		int bgx = (qlm < 2) ? qlm : qlm - 2;

		cmr_config.u64 = csr_rd_node(node, CVMX_BGXX_CMRX_CONFIG(0, bgx));
		pmd_control.u64 = csr_rd_node(node, CVMX_BGXX_SPUX_BR_PMD_CONTROL(0, bgx));

		switch (cmr_config.s.lmac_type) {
		case 0:
			qlm_mode[node][qlm] = CVMX_QLM_MODE_SGMII;
			break;
		case 1:
			qlm_mode[node][qlm] = CVMX_QLM_MODE_XAUI;
			break;
		case 2:
			qlm_mode[node][qlm] = CVMX_QLM_MODE_RXAUI;
			break;
		case 3:
			/*
			 * Use training to distinguish 10GBASE-KR from
			 * XFI; both run at the same rate.
			 */
			if (pmd_control.s.train_en)
				qlm_mode[node][qlm] = CVMX_QLM_MODE_10G_KR;
			else
				qlm_mode[node][qlm] = CVMX_QLM_MODE_XFI;
			break;
		case 4:
			/*
			 * Use training to distinguish 40GBASE-KR4 from
			 * XLAUI; both run at the same rate.
			 */
			if (pmd_control.s.train_en)
				qlm_mode[node][qlm] = CVMX_QLM_MODE_40G_KR4;
			else
				qlm_mode[node][qlm] = CVMX_QLM_MODE_XLAUI;
			break;
		default:
			qlm_mode[node][qlm] = CVMX_QLM_MODE_DISABLED;
			break;
		}
	} else {
		qlm_mode[node][qlm] = CVMX_QLM_MODE_DISABLED;
	}

	return qlm_mode[node][qlm];
}

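/**
 * Decode the mode of a QLM/DLM on CN73XX.
 */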
enum cvmx_qlm_mode __cvmx_qlm_get_mode_cn73xx(int qlm)
{
	cvmx_gserx_cfg_t gserx_cfg;
	static int qlm_mode[7] = { -1, -1, -1, -1, -1, -1, -1 };

	if (qlm > 6) {
		debug("Invalid QLM(%d) passed\n", qlm);
		return -1;
	}

	if (qlm_mode[qlm] != -1)
		return qlm_mode[qlm];

	gserx_cfg.u64 = csr_rd(CVMX_GSERX_CFG(qlm));
	if (gserx_cfg.s.pcie) {
		cvmx_pemx_cfg_t pemx_cfg;

		switch (qlm) {
		case 0: /* PEM0 x4 or first half of PEM0 x8 */
		case 1: /* PEM1 x4 or second half of PEM0 x8 */
		{
			pemx_cfg.u64 = csr_rd(CVMX_PEMX_CFG(0));
			if (pemx_cfg.cn78xx.lanes8) {
				/* PEM0 x8 */
				qlm_mode[qlm] = CVMX_QLM_MODE_PCIE_1X8;
			} else {
				/* PEM0/PEM1 x4 */
				qlm_mode[qlm] = CVMX_QLM_MODE_PCIE;
			}
			break;
		}
		case 2: /* PEM2 x4 or first half of PEM2 x8 */
		{
			pemx_cfg.u64 = csr_rd(CVMX_PEMX_CFG(2));
			if (pemx_cfg.cn78xx.lanes8) {
				/* PEM2 x8 */
				qlm_mode[qlm] = CVMX_QLM_MODE_PCIE_1X8;
			} else {
				/* PEM2 x4 */
				qlm_mode[qlm] = CVMX_QLM_MODE_PCIE;
			}
			break;
		}
		case 5:
		case 6:
			qlm_mode[qlm] = CVMX_QLM_MODE_PCIE_1X2;
			break;
		case 3: /* Second half of PEM2 x8 or PEM3 x4 */
		{
			pemx_cfg.u64 = csr_rd(CVMX_PEMX_CFG(2));
			if (pemx_cfg.cn78xx.lanes8) {
				/* Second half of PEM2 x8 */
				qlm_mode[qlm] = CVMX_QLM_MODE_PCIE_1X8;
			} else {
				/* PEM3 x4 */
				qlm_mode[qlm] = CVMX_QLM_MODE_PCIE;
			}
			break;
		}
		default:
			qlm_mode[qlm] = CVMX_QLM_MODE_DISABLED;
			break;
		}
	} else if (gserx_cfg.s.bgx) {
		cvmx_bgxx_cmrx_config_t cmr_config;
		cvmx_bgxx_cmr_rx_lmacs_t bgx_cmr_rx_lmacs;
		cvmx_bgxx_spux_br_pmd_control_t pmd_control;
		int bgx = 0;
		int start = 0, end = 4, index;
		int lane_mask = 0, train_mask = 0;
		int mux = 0;

		if (qlm < 4) {
			bgx = qlm - 2; /* QLM2: BGX0, QLM3: BGX1 */
		} else if (qlm == 5 || qlm == 6) {
			/* BGX2 can be muxed across DLM5 and DLM6 */
			bgx = 2;
			mux = cvmx_qlm_mux_interface(bgx);
			if (mux == 0) {
				start = 0;
				end = 4;
			} else if (mux == 1) {
				start = 0;
				end = 2;
			} else if (mux == 2) {
				start = 2;
				end = 4;
			} else {
				qlm_mode[qlm] = CVMX_QLM_MODE_DISABLED;
				return qlm_mode[qlm];
			}
		}

		for (index = start; index < end; index++) {
			cmr_config.u64 = csr_rd(CVMX_BGXX_CMRX_CONFIG(index, bgx));
			pmd_control.u64 = csr_rd(CVMX_BGXX_SPUX_BR_PMD_CONTROL(index, bgx));
			lane_mask |= (cmr_config.s.lmac_type << (index * 4));
			train_mask |= (pmd_control.s.train_en << (index * 4));
		}

		/* The number of lmacs needs updating when only DLM6 is used */
		if (mux == 2)
			bgx_cmr_rx_lmacs.u64 = csr_rd(CVMX_BGXX_CMR_RX_LMACS(2));
		switch (lane_mask) {
		case 0: /* All lanes SGMII */
			if (mux == 1) {
				qlm_mode[qlm] = CVMX_QLM_MODE_SGMII_2X1;
			} else if (mux == 2) {
				qlm_mode[qlm] = CVMX_QLM_MODE_SGMII_2X1;
				bgx_cmr_rx_lmacs.s.lmacs = 4;
			} else {
				qlm_mode[qlm] = CVMX_QLM_MODE_SGMII;
			}
			break;
		case 0x1:
			qlm_mode[qlm] = CVMX_QLM_MODE_XAUI;
			break;
		case 0x2:
			if (mux == 1) {
				/* RXAUI on DLM5 */
				qlm_mode[qlm] = CVMX_QLM_MODE_RXAUI_1X2;
			} else if (mux == 0) {
				/* RXAUI and SGMII mixed across the QLM */
				qlm_mode[qlm] = CVMX_QLM_MODE_MIXED;
			} else {
				qlm_mode[qlm] = CVMX_QLM_MODE_DISABLED;
			}
			break;
		case 0x202:
			if (mux == 2) {
				/* RXAUI on DLM6 */
				qlm_mode[qlm] = CVMX_QLM_MODE_RXAUI_1X2;
				bgx_cmr_rx_lmacs.s.lmacs = 4;
			} else if (mux == 1) {
				/* RXAUI on DLM5 */
				qlm_mode[qlm] = CVMX_QLM_MODE_RXAUI_1X2;
			} else if (mux == 0) {
				qlm_mode[qlm] = CVMX_QLM_MODE_RXAUI;
			} else {
				qlm_mode[qlm] = CVMX_QLM_MODE_DISABLED;
			}
			break;
		case 0x22:
			qlm_mode[qlm] = CVMX_QLM_MODE_RXAUI;
			break;
		case 0x3333:
			/*
			 * Use training to distinguish 10GBASE-KR from
			 * XFI; both run at the same rate.
			 */
			if (train_mask)
				qlm_mode[qlm] = CVMX_QLM_MODE_10G_KR;
			else
				qlm_mode[qlm] = CVMX_QLM_MODE_XFI;
			break;
		case 0x4:
			/*
			 * Use training to distinguish 40GBASE-KR4 from
			 * XLAUI; both run at the same rate.
			 */
			if (train_mask)
				qlm_mode[qlm] = CVMX_QLM_MODE_40G_KR4;
			else
				qlm_mode[qlm] = CVMX_QLM_MODE_XLAUI;
			break;
		case 0x0005:
			qlm_mode[qlm] = CVMX_QLM_MODE_RGMII_SGMII;
			break;
		case 0x3335:
			if (train_mask)
				qlm_mode[qlm] = CVMX_QLM_MODE_RGMII_10G_KR;
			else
				qlm_mode[qlm] = CVMX_QLM_MODE_RGMII_XFI;
			break;
		case 0x45:
			if (train_mask)
				qlm_mode[qlm] = CVMX_QLM_MODE_RGMII_40G_KR4;
			else
				qlm_mode[qlm] = CVMX_QLM_MODE_RGMII_XLAUI;
			break;
		case 0x225:
			qlm_mode[qlm] = CVMX_QLM_MODE_RGMII_RXAUI;
			break;
		case 0x15:
			qlm_mode[qlm] = CVMX_QLM_MODE_RGMII_XAUI;
			break;
		case 0x200:
			if (mux == 2) {
				qlm_mode[qlm] = CVMX_QLM_MODE_RXAUI_1X2;
				bgx_cmr_rx_lmacs.s.lmacs = 4;
				break;
			}
			fallthrough;
		case 0x205:
		case 0x233:
		case 0x3302:
		case 0x3305:
			if (mux == 0)
				qlm_mode[qlm] = CVMX_QLM_MODE_MIXED;
			else
				qlm_mode[qlm] = CVMX_QLM_MODE_DISABLED;
			break;
		case 0x3300:
			if (mux == 0) {
				qlm_mode[qlm] = CVMX_QLM_MODE_MIXED;
			} else if (mux == 2) {
				if (train_mask)
					qlm_mode[qlm] = CVMX_QLM_MODE_10G_KR_1X2;
				else
					qlm_mode[qlm] = CVMX_QLM_MODE_XFI_1X2;
				bgx_cmr_rx_lmacs.s.lmacs = 4;
			} else {
				qlm_mode[qlm] = CVMX_QLM_MODE_DISABLED;
			}
			break;
		case 0x33:
			if (mux == 1 || mux == 2) {
				if (train_mask)
					qlm_mode[qlm] = CVMX_QLM_MODE_10G_KR_1X2;
				else
					qlm_mode[qlm] = CVMX_QLM_MODE_XFI_1X2;
				if (mux == 2)
					bgx_cmr_rx_lmacs.s.lmacs = 4;
			} else {
				qlm_mode[qlm] = CVMX_QLM_MODE_DISABLED;
			}
			break;
		case 0x0035:
			if (mux == 0)
				qlm_mode[qlm] = CVMX_QLM_MODE_MIXED;
			else if (train_mask)
				qlm_mode[qlm] = CVMX_QLM_MODE_RGMII_10G_KR_1X1;
			else
				qlm_mode[qlm] = CVMX_QLM_MODE_RGMII_XFI_1X1;
			break;
		case 0x235:
			if (mux == 0)
				qlm_mode[qlm] = CVMX_QLM_MODE_MIXED;
			else
				qlm_mode[qlm] = CVMX_QLM_MODE_DISABLED;
			break;
		default:
			qlm_mode[qlm] = CVMX_QLM_MODE_DISABLED;
			break;
		}
		if (mux == 2) {
			csr_wr(CVMX_BGXX_CMR_RX_LMACS(2), bgx_cmr_rx_lmacs.u64);
			csr_wr(CVMX_BGXX_CMR_TX_LMACS(2), bgx_cmr_rx_lmacs.u64);
		}
	} else if (gserx_cfg.s.sata) {
		qlm_mode[qlm] = CVMX_QLM_MODE_SATA_2X1;
	} else {
		qlm_mode[qlm] = CVMX_QLM_MODE_DISABLED;
	}

	return qlm_mode[qlm];
}

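/**
 * Decode the mode of a QLM/DLM on CNF75XX.
 */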
enum cvmx_qlm_mode __cvmx_qlm_get_mode_cnf75xx(int qlm)
{
	cvmx_gserx_cfg_t gserx_cfg;
	static int qlm_mode[9] = { -1, -1, -1, -1, -1, -1, -1, -1, -1 };

	if (qlm >= 9) {
		debug("Invalid QLM(%d) passed\n", qlm);
		return -1;
	}

	if (qlm_mode[qlm] != -1)
		return qlm_mode[qlm];

	if ((qlm == 2 || qlm == 3) && (OCTEON_IS_MODEL(OCTEON_CNF75XX))) {
		cvmx_sriox_status_reg_t status_reg;
		int port = (qlm == 2) ? 0 : 1;

		status_reg.u64 = csr_rd(CVMX_SRIOX_STATUS_REG(port));
		if (status_reg.s.srio)
			qlm_mode[qlm] = CVMX_QLM_MODE_SRIO_1X4;
		else
			qlm_mode[qlm] = CVMX_QLM_MODE_DISABLED;
		return qlm_mode[qlm];
	}

	gserx_cfg.u64 = csr_rd(CVMX_GSERX_CFG(qlm));
	if (gserx_cfg.s.pcie) {
		switch (qlm) {
		case 0:
		case 1:
			qlm_mode[qlm] = CVMX_QLM_MODE_PCIE;
			break;
		default:
			qlm_mode[qlm] = CVMX_QLM_MODE_DISABLED;
			break;
		}
	} else if (gserx_cfg.s.bgx) {
		cvmx_bgxx_cmrx_config_t cmr_config;
		cvmx_bgxx_spux_br_pmd_control_t pmd_control;
		int bgx = 0;
		int start = 0, end = 4, index;
		int lane_mask = 0, train_mask = 0;
		int mux = 0;
		cvmx_gserx_cfg_t gser1, gser2;

		/* BGX0 can be muxed across DLM4 and DLM5 */
		gser1.u64 = csr_rd(CVMX_GSERX_CFG(4));
		gser2.u64 = csr_rd(CVMX_GSERX_CFG(5));
		if (gser1.s.bgx && gser2.s.bgx) {
			start = 0;
			end = 4;
		} else if (gser1.s.bgx) {
			start = 0;
			end = 2;
			mux = 1;
		} else if (gser2.s.bgx) {
			start = 2;
			end = 4;
			mux = 2;
		} else {
			qlm_mode[qlm] = CVMX_QLM_MODE_DISABLED;
			return qlm_mode[qlm];
		}

		for (index = start; index < end; index++) {
			cmr_config.u64 = csr_rd(CVMX_BGXX_CMRX_CONFIG(index, bgx));
			pmd_control.u64 = csr_rd(CVMX_BGXX_SPUX_BR_PMD_CONTROL(index, bgx));
			lane_mask |= (cmr_config.s.lmac_type << (index * 4));
			train_mask |= (pmd_control.s.train_en << (index * 4));
		}

		switch (lane_mask) {
		case 0: /* All lanes SGMII */
			if (mux == 1 || mux == 2)
				qlm_mode[qlm] = CVMX_QLM_MODE_SGMII_2X1;
			else
				qlm_mode[qlm] = CVMX_QLM_MODE_SGMII;
			break;
		case 0x3300:
			if (mux == 0) {
				qlm_mode[qlm] = CVMX_QLM_MODE_MIXED;
			} else if (mux == 2) {
				if (train_mask)
					qlm_mode[qlm] = CVMX_QLM_MODE_10G_KR_1X2;
				else
					qlm_mode[qlm] = CVMX_QLM_MODE_XFI_1X2;
			} else {
				qlm_mode[qlm] = CVMX_QLM_MODE_DISABLED;
			}
			break;
		default:
			qlm_mode[qlm] = CVMX_QLM_MODE_DISABLED;
			break;
		}
	} else {
		qlm_mode[qlm] = CVMX_QLM_MODE_DISABLED;
	}

	return qlm_mode[qlm];
}

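/**
 * Get the mode of a QLM
 *
 * @param qlm  QLM to examine
 *
 * Return: the mode of the QLM
 */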
enum cvmx_qlm_mode cvmx_qlm_get_mode(int qlm)
{
	if (OCTEON_IS_OCTEON2())
		return __cvmx_qlm_get_mode_cn6xxx(qlm);
	else if (OCTEON_IS_MODEL(OCTEON_CN70XX))
		return __cvmx_qlm_get_mode_cn70xx(qlm);
	else if (OCTEON_IS_MODEL(OCTEON_CN78XX))
		return cvmx_qlm_get_mode_cn78xx(cvmx_get_node_num(), qlm);
	else if (OCTEON_IS_MODEL(OCTEON_CN73XX))
		return __cvmx_qlm_get_mode_cn73xx(qlm);
	else if (OCTEON_IS_MODEL(OCTEON_CNF75XX))
		return __cvmx_qlm_get_mode_cnf75xx(qlm);

	return CVMX_QLM_MODE_DISABLED;
}

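/**
 * Determine the reference clock of a QLM on CN7XXX parts from the
 * PCIe reference-clock selection or the lane mode.
 *
 * @param node  node the QLM is on
 * @param qlm   QLM to examine
 *
 * Return: reference clock speed in Hz, 0 if unknown, -1 on error
 */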
int cvmx_qlm_measure_clock_cn7xxx(int node, int qlm)
{
	cvmx_gserx_cfg_t cfg;
	cvmx_gserx_refclk_sel_t refclk_sel;
	cvmx_gserx_lane_mode_t lane_mode;

	if (OCTEON_IS_MODEL(OCTEON_CN73XX)) {
		if (node != 0 || qlm >= 7)
			return -1;
	} else if (OCTEON_IS_MODEL(OCTEON_CN78XX)) {
		if (qlm >= 8 || node > 1)
			return -1;
	} else {
		debug("%s: Unsupported OCTEON model\n", __func__);
		return -1;
	}

	cfg.u64 = csr_rd_node(node, CVMX_GSERX_CFG(qlm));

	if (cfg.s.pcie) {
		refclk_sel.u64 = csr_rd_node(node, CVMX_GSERX_REFCLK_SEL(qlm));
		if (refclk_sel.s.pcie_refclk125)
			return REF_125MHZ;
		else
			return REF_100MHZ;
	}

	lane_mode.u64 = csr_rd_node(node, CVMX_GSERX_LANE_MODE(qlm));
	switch (lane_mode.s.lmode) {
	case R_25G_REFCLK100:
		return REF_100MHZ;
	case R_5G_REFCLK100:
		return REF_100MHZ;
	case R_8G_REFCLK100:
		return REF_100MHZ;
	case R_125G_REFCLK15625_KX:
		return REF_156MHZ;
	case R_3125G_REFCLK15625_XAUI:
		return REF_156MHZ;
	case R_103125G_REFCLK15625_KR:
		return REF_156MHZ;
	case R_125G_REFCLK15625_SGMII:
		return REF_156MHZ;
	case R_5G_REFCLK15625_QSGMII:
		return REF_156MHZ;
	case R_625G_REFCLK15625_RXAUI:
		return REF_156MHZ;
	case R_25G_REFCLK125:
		return REF_125MHZ;
	case R_5G_REFCLK125:
		return REF_125MHZ;
	case R_8G_REFCLK125:
		return REF_125MHZ;
	default:
		return 0;
	}
}

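/**
 * Measure the reference clock of a QLM on a specific node
 *
 * @param node  node the QLM is on
 * @param qlm   QLM to measure
 *
 * Return: reference clock speed in Hz
 */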
int cvmx_qlm_measure_clock_node(int node, int qlm)
{
	if (octeon_has_feature(OCTEON_FEATURE_MULTINODE))
		return cvmx_qlm_measure_clock_cn7xxx(node, qlm);
	else
		return cvmx_qlm_measure_clock(qlm);
}

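/**
 * Measure the reference clock of a QLM
 *
 * @param qlm  QLM to measure
 *
 * Return: reference clock speed in Hz
 */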
int cvmx_qlm_measure_clock(int qlm)
{
	cvmx_mio_ptp_clock_cfg_t ptp_clock;
	u64 count;
	u64 start_cycle, stop_cycle;
	int evcnt_offset = 0x10;	/* Input 0x10 + qlm selects the QLM ref clock */
	int incr_count = 1;
	static int ref_clock[16] = { 0 };

	if (ref_clock[qlm])
		return ref_clock[qlm];

	if (OCTEON_IS_OCTEON3() && !OCTEON_IS_MODEL(OCTEON_CN70XX))
		return cvmx_qlm_measure_clock_cn7xxx(cvmx_get_node_num(), qlm);

	if (OCTEON_IS_MODEL(OCTEON_CN70XX) && qlm == 0) {
		cvmx_gserx_dlmx_ref_clkdiv2_t ref_clkdiv2;

		/*
		 * The counter sees a divided clock when DLM0's reference
		 * clock divider is enabled, so scale the result.
		 */
		ref_clkdiv2.u64 = csr_rd(CVMX_GSERX_DLMX_REF_CLKDIV2(qlm, 0));
		if (ref_clkdiv2.s.ref_clkdiv2)
			incr_count = 2;
	}

	/* Disable the PTP event counter while we configure it */
	ptp_clock.u64 = csr_rd(CVMX_MIO_PTP_CLOCK_CFG);
	ptp_clock.s.evcnt_en = 0;
	csr_wr(CVMX_MIO_PTP_CLOCK_CFG, ptp_clock.u64);

	/* Count on the rising edge, and choose which QLM to count */
	ptp_clock.u64 = csr_rd(CVMX_MIO_PTP_CLOCK_CFG);
	ptp_clock.s.evcnt_edge = 0;
	ptp_clock.s.evcnt_in = evcnt_offset + qlm;
	csr_wr(CVMX_MIO_PTP_CLOCK_CFG, ptp_clock.u64);

	/* Clear the event counter */
	csr_rd(CVMX_MIO_PTP_EVT_CNT);
	count = csr_rd(CVMX_MIO_PTP_EVT_CNT);
	csr_wr(CVMX_MIO_PTP_EVT_CNT, -count);

	/* Set the event counter to 1 billion; it counts down from there */
	csr_wr(CVMX_MIO_PTP_EVT_CNT, 1000000000);

	/* Enable the PTP event counter */
	ptp_clock.u64 = csr_rd(CVMX_MIO_PTP_CLOCK_CFG);
	ptp_clock.s.evcnt_en = 1;
	csr_wr(CVMX_MIO_PTP_CLOCK_CFG, ptp_clock.u64);

	start_cycle = get_ticks();
	/* Wait for 50ms */
	mdelay(50);

	/* Read the counter */
	csr_rd(CVMX_MIO_PTP_EVT_CNT);
	count = csr_rd(CVMX_MIO_PTP_EVT_CNT);
	stop_cycle = get_ticks();

	/* Disable the PTP event counter */
	ptp_clock.u64 = csr_rd(CVMX_MIO_PTP_CLOCK_CFG);
	ptp_clock.s.evcnt_en = 0;
	csr_wr(CVMX_MIO_PTP_CLOCK_CFG, ptp_clock.u64);

	/* The counter counted down, so reverse it */
	count = 1000000000 - count;
	count *= incr_count;

	/* Ref clock = counted edges * CPU clock / elapsed core cycles */
	ref_clock[qlm] = count * gd->cpu_clk / (stop_cycle - start_cycle);

	return ref_clock[qlm];
}

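/**
 * Perform RX equalization on a QLM
 *
 * @param node  node the QLM is on
 * @param qlm   QLM to perform RX equalization on
 * @param lane  lane to perform the equalization on, or -1 for all lanes
 *
 * Return: 0 on success, -1 if any lane failed or equalization is not
 *	   applicable
 */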
int __cvmx_qlm_rx_equalization(int node, int qlm, int lane)
{
	cvmx_gserx_phy_ctl_t phy_ctl;
	cvmx_gserx_br_rxx_ctl_t rxx_ctl;
	cvmx_gserx_br_rxx_eer_t rxx_eer;
	cvmx_gserx_rx_eie_detsts_t eie_detsts;
	int fail, gbaud, l, lane_mask;
	enum cvmx_qlm_mode mode;
	int max_lanes = cvmx_qlm_get_lanes(qlm);
	cvmx_gserx_lane_mode_t lmode;
	cvmx_gserx_lane_px_mode_1_t pmode_1;
	int pending = 0;
	u64 timeout;

	/* Don't touch a QLM that is powered down or in reset */
	phy_ctl.u64 = csr_rd_node(node, CVMX_GSERX_PHY_CTL(qlm));
	if (phy_ctl.s.phy_pd || phy_ctl.s.phy_reset)
		return -1;

	/*
	 * Don't run equalization if a lane is running the LBERT pattern
	 * matcher.
	 */
	for (l = 0; l < max_lanes; l++) {
		cvmx_gserx_lanex_lbert_cfg_t lbert_cfg;

		if (lane != -1 && lane != l)
			continue;

		lbert_cfg.u64 = csr_rd_node(node, CVMX_GSERX_LANEX_LBERT_CFG(l, qlm));
		if (lbert_cfg.s.lbert_pm_en == 1)
			return -1;
	}

	/* Read the lane mode */
	lmode.u64 = csr_rd_node(node, CVMX_GSERX_LANE_MODE(qlm));

	/*
	 * Check whether the QLM runs the VMA in manual mode (manual DFE).
	 * If so, RX equalization is not performed.
	 */
	pmode_1.u64 = csr_rd_node(node, CVMX_GSERX_LANE_PX_MODE_1(lmode.s.lmode, qlm));
	if (pmode_1.s.vma_mm == 1) {
#ifdef DEBUG_QLM
		debug("N%d:QLM%d: VMA Manual (manual DFE) selected. Not completing Rx equalization\n",
		      node, qlm);
#endif
		return 0;
	}

	if (OCTEON_IS_MODEL(OCTEON_CN78XX)) {
		gbaud = cvmx_qlm_get_gbaud_mhz_node(node, qlm);
		mode = cvmx_qlm_get_mode_cn78xx(node, qlm);
	} else {
		gbaud = cvmx_qlm_get_gbaud_mhz(qlm);
		mode = cvmx_qlm_get_mode(qlm);
	}

	/* Equalization is only needed at 6.25 Gbaud and above */
	if (qlm < 8) {
		if (gbaud < 6250)
			return 0;
	}

	/* RX equalization is not performed on PCIe links */
	if (mode == CVMX_QLM_MODE_PCIE || mode == CVMX_QLM_MODE_PCIE_1X8 ||
	    mode == CVMX_QLM_MODE_PCIE_1X2 || mode == CVMX_QLM_MODE_PCIE_2X1)
		return -1;

	fail = 0;

	/*
	 * Before starting RX equalization, make sure the lanes have
	 * achieved CDR lock.
	 */
	if (lane == -1) {
		if (CVMX_WAIT_FOR_FIELD64_NODE(node, CVMX_GSERX_RX_EIE_DETSTS(qlm),
					       cvmx_gserx_rx_eie_detsts_t, cdrlock, ==,
					       (1 << max_lanes) - 1, 500)) {
#ifdef DEBUG_QLM
			eie_detsts.u64 = csr_rd_node(node, CVMX_GSERX_RX_EIE_DETSTS(qlm));
			debug("ERROR: %d:QLM%d: CDR Lock not detected for all lanes. CDR_LOCK(0x%x)\n",
			      node, qlm, eie_detsts.s.cdrlock);
#endif
			return -1;
		}
	} else {
		if (CVMX_WAIT_FOR_FIELD64_NODE(node, CVMX_GSERX_RX_EIE_DETSTS(qlm),
					       cvmx_gserx_rx_eie_detsts_t, cdrlock, &, (1 << lane),
					       500)) {
#ifdef DEBUG_QLM
			eie_detsts.u64 = csr_rd_node(node, CVMX_GSERX_RX_EIE_DETSTS(qlm));
			debug("ERROR: %d:QLM%d: CDR Lock not detected for Lane%d CDR_LOCK(0x%x)\n",
			      node, qlm, lane, eie_detsts.s.cdrlock);
#endif
			return -1;
		}
	}

	/*
	 * CN78XX pass 1.x: lane 3 is handled specially. It is set up for
	 * software-managed equalization here, and its request is
	 * initiated through lane 2.
	 */
	if (OCTEON_IS_MODEL(OCTEON_CN78XX_PASS1_X) && (lane == -1 || lane == 3)) {
		/* Enable software control */
		rxx_ctl.u64 = csr_rd_node(node, CVMX_GSERX_BR_RXX_CTL(3, qlm));
		rxx_ctl.s.rxt_swm = 1;
		csr_wr_node(node, CVMX_GSERX_BR_RXX_CTL(3, qlm), rxx_ctl.u64);

		/* Clear the completion flag */
		rxx_eer.u64 = csr_rd_node(node, CVMX_GSERX_BR_RXX_EER(3, qlm));
		rxx_eer.s.rxt_esv = 0;
		csr_wr_node(node, CVMX_GSERX_BR_RXX_EER(3, qlm), rxx_eer.u64);

		if (lane == 3) {
			/* Initiate the request via lane 2 */
			rxx_eer.u64 = csr_rd_node(node, CVMX_GSERX_BR_RXX_EER(2, qlm));
			rxx_eer.s.rxt_eer = 1;
			csr_wr_node(node, CVMX_GSERX_BR_RXX_EER(2, qlm), rxx_eer.u64);
		}
	}

	for (l = 0; l < max_lanes; l++) {
		if (lane != -1 && lane != l)
			continue;

		/*
		 * Lane 3 on CN78XX pass 1.x was prepared above; its
		 * request is triggered together with lane 2.
		 */
		if (OCTEON_IS_MODEL(OCTEON_CN78XX_PASS1_X) && l == 3) {
			pending |= 1 << 3;
			continue;
		}

		/* Enable software control */
		rxx_ctl.u64 = csr_rd_node(node, CVMX_GSERX_BR_RXX_CTL(l, qlm));
		rxx_ctl.s.rxt_swm = 1;
		csr_wr_node(node, CVMX_GSERX_BR_RXX_CTL(l, qlm), rxx_ctl.u64);

		/* Clear the completion flag and initiate a new request */
		rxx_eer.u64 = csr_rd_node(node, CVMX_GSERX_BR_RXX_EER(l, qlm));
		rxx_eer.s.rxt_esv = 0;
		rxx_eer.s.rxt_eer = 1;
		csr_wr_node(node, CVMX_GSERX_BR_RXX_EER(l, qlm), rxx_eer.u64);
		pending |= 1 << l;
	}

	/*
	 * Wait for the equalization requests to complete, with a
	 * 250 ms timeout.
	 */
	timeout = get_timer(0);

	lane_mask = 0;
	while (pending) {
		for (l = 0; l < max_lanes; l++) {
			lane_mask = 1 << l;

			if (!(pending & lane_mask))
				continue;

			/*
			 * The rxt_esv bit is set when the request
			 * completes and the results are valid. A lane
			 * that drops out of electrical-idle detection or
			 * CDR lock cannot finish equalization.
			 */
			eie_detsts.u64 = csr_rd_node(node, CVMX_GSERX_RX_EIE_DETSTS(qlm));
			rxx_eer.u64 = csr_rd_node(node, CVMX_GSERX_BR_RXX_EER(l, qlm));
			if (!(eie_detsts.s.eiests & eie_detsts.s.cdrlock & lane_mask)) {
				fail |= lane_mask;
				pending &= ~lane_mask;
			} else if (rxx_eer.s.rxt_esv) {
				pending &= ~lane_mask;
			}
		}

		/* Timeout */
		if (get_timer(timeout) > 250)
			break;
	}

	lane_mask = 0;
	/* Report the results and return the lanes to hardware control */
	for (l = 0; l < max_lanes; l++) {
		if (lane != -1 && lane != l)
			continue;

		lane_mask = 1 << l;
		rxx_eer.u64 = csr_rd_node(node, CVMX_GSERX_BR_RXX_EER(l, qlm));

		rxx_ctl.u64 = csr_rd_node(node, CVMX_GSERX_BR_RXX_CTL(l, qlm));
		rxx_ctl.s.rxt_swm = 0;
		csr_wr_node(node, CVMX_GSERX_BR_RXX_CTL(l, qlm), rxx_ctl.u64);

		if (fail & lane_mask) {
#ifdef DEBUG_QLM
			debug("%d:QLM%d: Lane%d RX equalization lost CDR Lock or entered Electrical Idle\n",
			      node, qlm, l);
#endif
		} else if ((pending & lane_mask) || !rxx_eer.s.rxt_esv) {
#ifdef DEBUG_QLM
			debug("%d:QLM%d: Lane %d RX equalization timeout\n", node, qlm, l);
#endif
			fail |= 1 << l;
		} else {
#ifdef DEBUG_QLM
			const char *dir_label[4] = { "Hold", "Inc", "Dec", "Hold" };
#ifdef DEBUG_QLM_RX
			cvmx_gserx_lanex_rx_aeq_out_0_t rx_aeq_out_0;
			cvmx_gserx_lanex_rx_aeq_out_1_t rx_aeq_out_1;
			cvmx_gserx_lanex_rx_aeq_out_2_t rx_aeq_out_2;
			cvmx_gserx_lanex_rx_vma_status_0_t rx_vma_status_0;
#endif
			debug("%d:QLM%d: Lane%d: RX equalization completed.\n", node, qlm, l);
			debug("    Tx Direction Hints TXPRE: %s, TXMAIN: %s, TXPOST: %s, Figure of Merit: %d\n",
			      dir_label[(rxx_eer.s.rxt_esm) & 0x3],
			      dir_label[((rxx_eer.s.rxt_esm) >> 2) & 0x3],
			      dir_label[((rxx_eer.s.rxt_esm) >> 4) & 0x3], rxx_eer.s.rxt_esm >> 6);

#ifdef DEBUG_QLM_RX
			rx_aeq_out_0.u64 = csr_rd_node(node, CVMX_GSERX_LANEX_RX_AEQ_OUT_0(l, qlm));
			rx_aeq_out_1.u64 = csr_rd_node(node, CVMX_GSERX_LANEX_RX_AEQ_OUT_1(l, qlm));
			rx_aeq_out_2.u64 = csr_rd_node(node, CVMX_GSERX_LANEX_RX_AEQ_OUT_2(l, qlm));
			rx_vma_status_0.u64 =
				csr_rd_node(node, CVMX_GSERX_LANEX_RX_VMA_STATUS_0(l, qlm));
			debug("    DFE Tap1:%lu, Tap2:%ld, Tap3:%ld, Tap4:%ld, Tap5:%ld\n",
			      (unsigned long)cvmx_bit_extract(rx_aeq_out_1.u64, 0, 5),
			      (long)cvmx_bit_extract_smag(rx_aeq_out_1.u64, 5, 9),
			      (long)cvmx_bit_extract_smag(rx_aeq_out_1.u64, 10, 14),
			      (long)cvmx_bit_extract_smag(rx_aeq_out_0.u64, 0, 4),
			      (long)cvmx_bit_extract_smag(rx_aeq_out_0.u64, 5, 9));
			debug("    Pre-CTLE Gain:%lu, Post-CTLE Gain:%lu, CTLE Peak:%lu, CTLE Pole:%lu\n",
			      (unsigned long)cvmx_bit_extract(rx_aeq_out_2.u64, 4, 4),
			      (unsigned long)cvmx_bit_extract(rx_aeq_out_2.u64, 0, 4),
			      (unsigned long)cvmx_bit_extract(rx_vma_status_0.u64, 2, 4),
			      (unsigned long)cvmx_bit_extract(rx_vma_status_0.u64, 0, 2));
#endif
#endif
		}
	}

	return (fail) ? -1 : 0;
}

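/**
 * Workaround for GSER errata 27882: force a transmit coefficient
 * request override so the TX taps get updated. Applies to CN73XX
 * pass 1.x, CNF75XX pass 1.0 and CN78XX.
 *
 * @param node  node the QLM is on
 * @param qlm   QLM to apply the workaround to
 * @param lane  lane to apply the workaround to
 *
 * Return: 0 on success, -1 if the lane never achieved CDR lock
 */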
int cvmx_qlm_gser_errata_27882(int node, int qlm, int lane)
{
	cvmx_gserx_lanex_pcs_ctlifc_0_t clifc0;
	cvmx_gserx_lanex_pcs_ctlifc_2_t clifc2;

	if (!(OCTEON_IS_MODEL(OCTEON_CN73XX_PASS1_0) || OCTEON_IS_MODEL(OCTEON_CN73XX_PASS1_1) ||
	      OCTEON_IS_MODEL(OCTEON_CN73XX_PASS1_2) || OCTEON_IS_MODEL(OCTEON_CNF75XX_PASS1_0) ||
	      OCTEON_IS_MODEL(OCTEON_CN78XX)))
		return 0;

	/* Wait for the lane to achieve CDR lock */
	if (CVMX_WAIT_FOR_FIELD64_NODE(node, CVMX_GSERX_RX_EIE_DETSTS(qlm),
				       cvmx_gserx_rx_eie_detsts_t, cdrlock, &,
				       (1 << lane), 200))
		return -1;

	/* Assert the TX coefficient request override, then release it */
	clifc0.u64 = csr_rd_node(node, CVMX_GSERX_LANEX_PCS_CTLIFC_0(lane, qlm));
	clifc0.s.cfg_tx_coeff_req_ovrrd_val = 1;
	csr_wr_node(node, CVMX_GSERX_LANEX_PCS_CTLIFC_0(lane, qlm), clifc0.u64);
	clifc2.u64 = csr_rd_node(node, CVMX_GSERX_LANEX_PCS_CTLIFC_2(lane, qlm));
	clifc2.s.cfg_tx_coeff_req_ovrrd_en = 1;
	csr_wr_node(node, CVMX_GSERX_LANEX_PCS_CTLIFC_2(lane, qlm), clifc2.u64);
	clifc2.u64 = csr_rd_node(node, CVMX_GSERX_LANEX_PCS_CTLIFC_2(lane, qlm));
	clifc2.s.ctlifc_ovrrd_req = 1;
	csr_wr_node(node, CVMX_GSERX_LANEX_PCS_CTLIFC_2(lane, qlm), clifc2.u64);
	clifc2.u64 = csr_rd_node(node, CVMX_GSERX_LANEX_PCS_CTLIFC_2(lane, qlm));
	clifc2.s.cfg_tx_coeff_req_ovrrd_en = 0;
	csr_wr_node(node, CVMX_GSERX_LANEX_PCS_CTLIFC_2(lane, qlm), clifc2.u64);
	clifc2.u64 = csr_rd_node(node, CVMX_GSERX_LANEX_PCS_CTLIFC_2(lane, qlm));
	clifc2.s.ctlifc_ovrrd_req = 1;
	csr_wr_node(node, CVMX_GSERX_LANEX_PCS_CTLIFC_2(lane, qlm), clifc2.u64);
	return 0;
}

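/**
 * Workaround for GSER errata 25992: program the RX CTLE bias control
 * and RX error-detect control on every lane. Applies to CN73XX
 * pass 1.x and CN78XX pass 1.x.
 *
 * @param node  node the QLM is on
 * @param qlm   QLM to apply the workaround to
 */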
void cvmx_qlm_gser_errata_25992(int node, int qlm)
{
	int lane;
	int num_lanes = cvmx_qlm_get_lanes(qlm);

	if (!(OCTEON_IS_MODEL(OCTEON_CN73XX_PASS1_0) || OCTEON_IS_MODEL(OCTEON_CN73XX_PASS1_1) ||
	      OCTEON_IS_MODEL(OCTEON_CN73XX_PASS1_2) || OCTEON_IS_MODEL(OCTEON_CN78XX_PASS1_X)))
		return;

	for (lane = 0; lane < num_lanes; lane++) {
		cvmx_gserx_lanex_rx_ctle_ctrl_t rx_ctle_ctrl;
		cvmx_gserx_lanex_rx_cfg_4_t rx_cfg_4;

		rx_ctle_ctrl.u64 = csr_rd_node(node, CVMX_GSERX_LANEX_RX_CTLE_CTRL(lane, qlm));
		rx_ctle_ctrl.s.pcs_sds_rx_ctle_bias_ctrl = 3;
		csr_wr_node(node, CVMX_GSERX_LANEX_RX_CTLE_CTRL(lane, qlm), rx_ctle_ctrl.u64);

		rx_cfg_4.u64 = csr_rd_node(node, CVMX_GSERX_LANEX_RX_CFG_4(lane, qlm));
		rx_cfg_4.s.cfg_rx_errdet_ctrl = 0xcd6f;
		csr_wr_node(node, CVMX_GSERX_LANEX_RX_CFG_4(lane, qlm), rx_cfg_4.u64);
	}
}

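/**
 * Dump the JTAG state of every lane of a QLM via debug output
 *
 * @param qlm  QLM to dump
 */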
void cvmx_qlm_display_registers(int qlm)
{
	int num_lanes = cvmx_qlm_get_lanes(qlm);
	int lane;
	const __cvmx_qlm_jtag_field_t *ptr = cvmx_qlm_jtag_get_field();

	debug("%29s", "Field[<stop bit>:<start bit>]");
	for (lane = 0; lane < num_lanes; lane++)
		debug("\t Lane %d", lane);
	debug("\n");

	while (ptr && ptr->name) {
		debug("%20s[%3d:%3d]", ptr->name, ptr->stop_bit, ptr->start_bit);
		for (lane = 0; lane < num_lanes; lane++) {
			u64 val;
			int tx_byp = 0;

			/*
			 * The biasdrv_* and tcoeff_* fields are only
			 * meaningful when the serdes TX bypass is enabled.
			 */
			if (strncmp(ptr->name, "biasdrv_", 8) == 0 ||
			    strncmp(ptr->name, "tcoeff_", 7) == 0) {
				tx_byp = cvmx_qlm_jtag_get(qlm, lane, "serdes_tx_byp");
				if (tx_byp == 0) {
					debug("\t \t");
					continue;
				}
			}
			val = cvmx_qlm_jtag_get(qlm, lane, ptr->name);
			debug("\t%4llu (0x%04llx)", (unsigned long long)val,
			      (unsigned long long)val);
		}
		debug("\n");
		ptr++;
	}
}